From 98ebf814a0aaf1603654a4d7a181a49c4a314668 Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Tue, 20 Jul 2021 12:00:02 -0500 Subject: [PATCH 01/27] Initial changes for ros2 port --- ar_track_alvar/CMakeLists.txt | 284 ++++++++------- .../include/ar_track_alvar/Camera.h | 53 +-- .../include/ar_track_alvar/CvTestbed.h | 26 +- ar_track_alvar/include/ar_track_alvar/Draw.h | 58 +-- .../include/ar_track_alvar/FileFormatUtils.h | 4 +- .../include/ar_track_alvar/Kalman.h | 12 +- ar_track_alvar/include/ar_track_alvar/Line.h | 4 +- .../include/ar_track_alvar/Optimization.h | 4 +- ar_track_alvar/include/ar_track_alvar/Pose.h | 1 + .../include/ar_track_alvar/Rotation.h | 1 + ar_track_alvar/include/ar_track_alvar/Util.h | 9 +- .../ar_track_alvar/filter/kinect_filtering.h | 14 +- .../nodes/.vscode/c_cpp_properties.json | 17 + ar_track_alvar/nodes/.vscode/settings.json | 5 + ar_track_alvar/nodes/FindMarkerBundles.cpp | 105 +++--- .../nodes/FindMarkerBundlesNoKinect.cpp | 43 +-- ar_track_alvar/nodes/IndividualMarkers.cpp | 79 +++-- .../nodes/IndividualMarkersNoKinect.cpp | 61 ++-- ar_track_alvar/nodes/TrainMarkerBundle.cpp | 41 +-- ar_track_alvar/nodes/log/COLCON_IGNORE | 0 ar_track_alvar/nodes/log/latest | 1 + ar_track_alvar/nodes/log/latest_list | 1 + .../list_2021-07-19_11-38-02/logger_all.log | 26 ++ ar_track_alvar/package.xml | 68 ++-- ar_track_alvar/src/Camera.cpp | 334 ++++++++++-------- ar_track_alvar/src/CvTestbed.cpp | 66 ++-- ar_track_alvar/src/Draw.cpp | 118 ++++--- ar_track_alvar/src/Kalman.cpp | 90 +++-- ar_track_alvar/src/Line.cpp | 14 +- ar_track_alvar/src/Marker.cpp | 3 +- ar_track_alvar/src/Optimization.cpp | 5 +- ar_track_alvar/src/Pose.cpp | 1 + ar_track_alvar/src/Rotation.cpp | 4 +- ar_track_alvar/src/Util.cpp | 6 +- ar_track_alvar/src/kinect_filtering.cpp | 73 ++-- ar_track_alvar/test/test_ar.py | 3 +- ar_track_alvar/test/test_kinect_filtering.cpp | 6 +- ar_track_alvar/test/test_points.cpp | 7 +- ar_track_alvar_msgs/CMakeLists.txt | 35 +- ar_track_alvar_msgs/msg/AlvarMarker.msg | 2 +- ar_track_alvar_msgs/msg/AlvarMarkers.msg | 2 +- ar_track_alvar_msgs/package.xml | 27 +- 42 files changed, 968 insertions(+), 745 deletions(-) create mode 100644 ar_track_alvar/nodes/.vscode/c_cpp_properties.json create mode 100644 ar_track_alvar/nodes/.vscode/settings.json create mode 100644 ar_track_alvar/nodes/log/COLCON_IGNORE create mode 120000 ar_track_alvar/nodes/log/latest create mode 120000 ar_track_alvar/nodes/log/latest_list create mode 100644 ar_track_alvar/nodes/log/list_2021-07-19_11-38-02/logger_all.log diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 7450ac6..5af7012 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -1,85 +1,85 @@ -cmake_minimum_required(VERSION 2.8.3) +cmake_minimum_required(VERSION 3.5) project(ar_track_alvar) -set(MSG_DEPS - ar_track_alvar_msgs - std_msgs - sensor_msgs - geometry_msgs - visualization_msgs) - -find_package(catkin COMPONENTS - genmsg - roscpp - tf - tf2 - image_transport - resource_retriever - cv_bridge - pcl_ros - pcl_conversions - message_generation - ${MSG_DEPS} - dynamic_reconfigure - cmake_modules - REQUIRED) +# if(NOT WIN32) +# if(NOT CMAKE_CXX_STANDARD) +# set(CMAKE_CXX_STANDARD 14) +# endif() +# if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") +# add_compile_options(-Wall -Wextra -Wpedantic) +# endif() +# endif() + +find_package(ament_cmake REQUIRED) +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +find_package(OpenCV REQUIRED 
COMPONENTS calib3d core highgui imgproc) +if(OpenCV_VERSION VERSION_GREATER 4.5.0) + target_compile_definitions(library PUBLIC USE_LEGACY_TRACKING=1) +endif() + + + +find_package(rclcpp REQUIRED) +find_package(tf2_ros REQUIRED) +find_package(tf2 REQUIRED) +find_package(image_transport REQUIRED) +find_package(resource_retriever REQUIRED) +find_package(cv_bridge REQUIRED) +find_package(perception_pcl REQUIRED) +find_package(pcl_ros) +find_package(pcl_conversions REQUIRED) +find_package(ar_track_alvar_msgs) +find_package(std_msgs) +find_package(sensor_msgs) +find_package(geometry_msgs) +find_package(visualization_msgs) +find_package(PCL REQUIRED QUIET COMPONENTS common io) find_package(Eigen3 REQUIRED) -find_package(OpenCV REQUIRED) -find_package(TinyXML REQUIRED) +find_package(TinyXML2 REQUIRED) -include(CheckCXXCompilerFlag) -CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) -CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) -if(COMPILER_SUPPORTS_CXX11) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") -elseif(COMPILER_SUPPORTS_CXX0X) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") -else() - message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") -endif() +include_directories(include) -cmake_policy(SET CMP0046 OLD) + +#set(GENCPP_DEPS ar_track_alvar_msgs_gencpp std_msgs_gencpp sensor_msgs_gencpp geometry_msgs_gencpp visualization_msgs_gencpp) + +# Kinect filtering code +set(KINECT_FILTERING_TARGETS kinect_filtering medianFilter) +set(ALVAR_TARGETS ar_track_alvar individualMarkers individualMarkersNoKinect trainMarkerBundle findMarkerBundles findMarkerBundlesNoKinect createMarker ar_track_alvar) # dynamic reconfigure support -generate_dynamic_reconfigure_options(cfg/Params.cfg) - -catkin_package( - INCLUDE_DIRS include - LIBRARIES ar_track_alvar - CATKIN_DEPENDS - ar_track_alvar_msgs - std_msgs - roscpp - tf - tf2 - message_runtime - image_transport - sensor_msgs - geometry_msgs - visualization_msgs - resource_retriever - cv_bridge - pcl_ros - pcl_conversions - dynamic_reconfigure +#generate_dynamic_reconfigure_options(cfg/Params.cfg) + +set(dependencies + OpenCV + tf2_ros + pcl_ros + pcl_conversions + std_msgs + image_transport + perception_pcl + visualization_msgs + rclcpp + resource_retriever + geometry_msgs + cv_bridge + sensor_msgs ) -include_directories(include - ${catkin_INCLUDE_DIRS} +include_directories(include ${OpenCV_INCLUDE_DIRS} - ${TinyXML_INCLUDE_DIRS} - + ${TinyXML2_INCLUDE_DIRS} + ${PCL_COMMON_INCLUDE_DIRS} ) -set(GENCPP_DEPS ar_track_alvar_msgs_gencpp std_msgs_gencpp sensor_msgs_gencpp geometry_msgs_gencpp visualization_msgs_gencpp) - add_library(ar_track_alvar src/Camera.cpp src/CaptureDevice.cpp src/Pose.cpp - src/Marker.cpp - src/MarkerDetector.cpp + #src/Marker.cpp + #src/MarkerDetector.cpp src/Bitset.cpp src/Rotation.cpp src/CvTestbed.cpp @@ -91,8 +91,9 @@ add_library(ar_track_alvar src/Threads_unix.cpp src/Mutex.cpp src/Mutex_unix.cpp - src/ConnectedComponents.cpp - src/Line.cpp src/Plugin.cpp + #src/ConnectedComponents.cpp + src/Line.cpp + src/Plugin.cpp src/Plugin_unix.cpp src/DirectoryIterator.cpp src/DirectoryIterator_unix.cpp @@ -102,82 +103,107 @@ add_library(ar_track_alvar src/Kalman.cpp src/kinect_filtering.cpp src/Optimization.cpp - src/MultiMarker.cpp - src/MultiMarkerBundle.cpp - src/MultiMarkerInitializer.cpp) -target_link_libraries(ar_track_alvar ${OpenCV_LIBS} ${TinyXML_LIBRARIES} ${catkin_LIBRARIES}) -add_dependencies(ar_track_alvar ${GENCPP_DEPS}) - -# Kinect 
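A note on the OpenCV guard above: `target_compile_definitions(library PUBLIC USE_LEGACY_TRACKING=1)` names a target called `library` that is never created, and it runs before any `add_library()` call, so CMake aborts at configure time; the definition presumably belongs on the `ar_track_alvar` target once it exists. The same cutoff can also be mirrored on the C++ side with OpenCV's version macros; a minimal sketch, assuming `USE_LEGACY_TRACKING` keeps the meaning the guard above gives it (everything else here is illustrative):

// Sketch: honoring the USE_LEGACY_TRACKING switch in code. The macro check
// approximates CMake's "OpenCV_VERSION VERSION_GREATER 4.5.0" test.
#include <opencv2/core/version.hpp>

#if defined(USE_LEGACY_TRACKING) || \
    (CV_VERSION_MAJOR > 4) || \
    (CV_VERSION_MAJOR == 4 && CV_VERSION_MINOR > 5)
// code path for OpenCV newer than 4.5.0
#else
// code path for older OpenCV
#endif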
filtering code -set(KINECT_FILTERING_TARGETS kinect_filtering medianFilter) + # src/MultiMarker.cpp + # src/MultiMarkerBundle.cpp + # src/MultiMarkerInitializer.cpp) +) + +target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBRARIES} ${TinyXML_LIBRARIES}) +ament_target_dependencies(ar_track_alvar ${dependencies}) add_library(kinect_filtering src/kinect_filtering.cpp) -target_link_libraries(kinect_filtering ${catkin_LIBRARIES}) -add_dependencies(kinect_filtering ${GENCPP_DEPS}) +ament_target_dependencies(kinect_filtering ${dependencies}) + add_library(medianFilter src/medianFilter.cpp) -target_link_libraries(medianFilter ar_track_alvar ${catkin_LIBRARIES}) -add_dependencies(medianFilter ${GENCPP_DEPS}) +target_link_libraries(medianFilter ar_track_alvar) +ament_target_dependencies(medianFilter ${dependencies}) -set(ALVAR_TARGETS ar_track_alvar individualMarkers individualMarkersNoKinect trainMarkerBundle findMarkerBundles findMarkerBundlesNoKinect createMarker ar_track_alvar) +# add_executable(individualMarkers nodes/IndividualMarkers.cpp) +# target_link_libraries(individualMarkers ar_track_alvar kinect_filtering) +# ament_target_dependencies(individualMarkers ${dependencies}) -add_executable(individualMarkers nodes/IndividualMarkers.cpp) -target_link_libraries(individualMarkers ar_track_alvar kinect_filtering ${catkin_LIBRARIES}) -add_dependencies(individualMarkers ${PROJECT_NAME}_gencpp ${GENCPP_DEPS} ${PROJECT_NAME}_gencfg) +# add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) +# target_link_libraries(individualMarkersNoKinect ar_track_alvar) +# ament_target_dependencies(individualMarkersNoKinect ${dependencies}) -add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) -target_link_libraries(individualMarkersNoKinect ar_track_alvar ${catkin_LIBRARIES}) -add_dependencies(individualMarkersNoKinect ${PROJECT_NAME}_gencpp ${GENCPP_DEPS} ${PROJECT_NAME}_gencfg) +# add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) +# target_link_libraries(trainMarkerBundle ar_track_alvar) +# ament_target_dependencies(trainMarkerBundle ${dependencies}) -add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) -target_link_libraries(trainMarkerBundle ar_track_alvar ${catkin_LIBRARIES}) -add_dependencies(trainMarkerBundle ${PROJECT_NAME}_gencpp ${GENCPP_DEPS}) -add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) -target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter ${catkin_LIBRARIES}) -add_dependencies(findMarkerBundles ${PROJECT_NAME}_gencpp ${GENCPP_DEPS}) +# add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) +# target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) +# ament_target_dependencies(findMarkerBundles ${dependencies}) -add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) -target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar ${catkin_LIBRARIES}) -add_dependencies(findMarkerBundlesNoKinect ${PROJECT_NAME}_gencpp ${GENCPP_DEPS}) +# add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) +# target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) +# ament_target_dependencies(findMarkerBundlesNoKinect ${dependencies}) -add_executable(createMarker src/SampleMarkerCreator.cpp) -target_link_libraries(createMarker ar_track_alvar ${catkin_LIBRARIES}) -add_dependencies(createMarker ${PROJECT_NAME}_gencpp ${GENCPP_DEPS}) +# add_executable(createMarker src/SampleMarkerCreator.cpp) +# 
target_link_libraries(createMarker ar_track_alvar) +# ament_target_dependencies(createMarker ${dependencies}) -install(TARGETS ${ALVAR_TARGETS} ${KINECT_FILTERING_TARGETS} - ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} - LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} - RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} -) -install(DIRECTORY include/${PROJECT_NAME}/ - DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} -) +ament_export_include_directories(include) +ament_export_libraries(ar_track_alvar) +ament_export_dependencies(OpenCV ar_track_alvar_msgs std_msgs rclcpp tf2_ros tf2 message_runtime image_transport sensor_msgs geometry_msgs visualization_msgs resource_retriever cv_bridge perception_pcl pcl_conversions) +ament_package() + +# install(TARGETS ${ALVAR_TARGETS} ${KINECT_FILTERING_TARGETS} +# ARCHIVE DESTINATION lib +# LIBRARY DESTINATION lib +# RUNTIME DESTINATION lib/${PROJECT_NAME} +# ) + +# install(DIRECTORY include/${PROJECT_NAME}/ +# DESTINATION include/${PROJECT_NAME} +# ) + +# install(DIRECTORY launch bundles +# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/ +# ) +# if(COMPILER_SUPPORTS_CXX11) +# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") +# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") +# elseif(COMPILER_SUPPORTS_CXX0X) +# else() +# message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") +# endif() + -install(DIRECTORY launch bundles - DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/ -) -if (CATKIN_ENABLE_TESTING) - find_package(catkin REQUIRED COMPONENTS roslaunch rostest) +#TODO fix these tests +# if(BUILD_TESTING) +# add_subdirectory(test) +# endif() + + +# if (BUILD_TESTING) +# find_package(catkin REQUIRED COMPONENTS roslaunch rostest) +# add_rostest(test/marker_arg_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) +# add_rostest(test/marker_arg_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) +# add_rostest(test/marker_param_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) +# add_rostest(test/marker_param_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) - file(GLOB LAUNCH_FILES launch/*.launch test/*.test) - foreach(LAUNCH_FILE ${LAUNCH_FILES}) - roslaunch_add_file_check(${LAUNCH_FILE} USE_TEST_DEPENDENCIES) - endforeach() +# catkin_download_test_data( +# ${PROJECT_NAME}_4markers_tork.bag +# http://download.ros.org/data/ar_track_alvar/ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag +# # Workaround the issue http://answers.ros.org/question/253787/accessing-data-downloaded-via-catkin_download_test_data/ +# # by downloading into source folder. +# #DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/test +# DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/test +# MD5 627aa0316bbfe4334e06023d7c2b4087 +# ) +# foreach(LAUNCH_FILE ${LAUNCH_FILES}) +# roslaunch_add_file_check(${LAUNCH_FILE} USE_TEST_DEPENDENCIES) +# endforeach() - catkin_download_test_data( - ${PROJECT_NAME}_4markers_tork.bag - http://download.ros.org/data/ar_track_alvar/ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag - # Workaround the issue http://answers.ros.org/question/253787/accessing-data-downloaded-via-catkin_download_test_data/ - # by downloading into source folder. 
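The catkin test harness above is commented out pending the port, and the `include(CheckCXXCompilerFlag)`/`cmake_policy(SET CMP0046 OLD)` leftovers at the bottom of this file are dead weight: the flag-check results are never consumed, and CMP0046 is a CMake 2.x era policy. Under ament, plain C++ tests are registered with `ament_add_gtest`; a minimal sketch of what one ported test body could look like (the test name and assertion are placeholders, not from this repo):

#include <gtest/gtest.h>

// Placeholder smoke test; a real port would exercise centroid() and
// extractOrientation() from the kinect_filtering helpers in this package.
TEST(KinectFiltering, SmokeTest)
{
  EXPECT_TRUE(true);
}

int main(int argc, char** argv)
{
  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}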
- #DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/test - DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/test - MD5 627aa0316bbfe4334e06023d7c2b4087 - ) - add_rostest(test/marker_arg_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) - add_rostest(test/marker_arg_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) - add_rostest(test/marker_param_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) - add_rostest(test/marker_param_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) -endif() +# file(GLOB LAUNCH_FILES launch/*.launch test/*.test) +# endif() + +include(CheckCXXCompilerFlag) +CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) +CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) + +cmake_policy(SET CMP0046 OLD) diff --git a/ar_track_alvar/include/ar_track_alvar/Camera.h b/ar_track_alvar/include/ar_track_alvar/Camera.h index beca09d..16d818a 100644 --- a/ar_track_alvar/include/ar_track_alvar/Camera.h +++ b/ar_track_alvar/include/ar_track_alvar/Camera.h @@ -37,14 +37,16 @@ #include "FileFormat.h" #include -#include -#include -#include -#include -#include -#include -#include +#include "rclcpp/rclcpp.hpp" +#include +#include +#include +#include #include +#include "opencv2/core/types_c.h" +#include +#include + namespace alvar { @@ -54,14 +56,14 @@ struct ALVAR_EXPORT ProjPoints { int height; /** \brief 3D object points corresponding with the detected 2D image points. */ - std::vector object_points; + std::vector object_points; /** \brief Detected 2D object points * If point_counts[0] == 10, then the * first 10 points are detected in the first frame. If * point_counts[1] == 6, then the next 6 of these points are * detected in the next frame... etc. 
 */
-	std::vector<CvPoint2D64f> image_points;
+	std::vector<cv::Point2d> image_points;
 
 	/** \brief Vector indicating how many points are detected for each frame */
 	std::vector<int> point_counts;
@@ -69,17 +71,17 @@ struct ALVAR_EXPORT ProjPoints {
 	void Reset();
 
 	/** \brief Add elements to \e object_points , \e image_points and \e point_counts using Chessboard pattern */
-	bool AddPointsUsingChessboard(IplImage *image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize);
+	bool AddPointsUsingChessboard(cv::Mat* image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize);
 
 	/** \brief Add elements to \e object_points , \e image_points and \e point_counts using detected markers */
-	bool AddPointsUsingMarkers(std::vector<PointDouble> &marker_corners, std::vector<PointDouble> &marker_corners_img, IplImage* image);
+	bool AddPointsUsingMarkers(std::vector<PointDouble> &marker_corners, std::vector<PointDouble> &marker_corners_img, cv::Mat* image);
 };
 
 /**
  * \brief Simple \e Camera class for calculating distortions, orientation or projections with pre-calibrated camera
  */
-class ALVAR_EXPORT Camera {
+class ALVAR_EXPORT Camera: public rclcpp::Node {
 
 public:
 
@@ -93,10 +95,9 @@ class ALVAR_EXPORT Camera {
 
 protected:
 	std::string cameraInfoTopic_;
-	sensor_msgs::CameraInfo cam_info_;
-	void camInfoCallback (const sensor_msgs::CameraInfoConstPtr &);
-	ros::Subscriber sub_;
-	ros::NodeHandle n_;
+	sensor_msgs::msg::CameraInfo cam_info_;
+	void camInfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info);
+	rclcpp::Subscription<sensor_msgs::msg::CameraInfo>::SharedPtr sub_;
 
 private:
 	bool LoadCalibXML(const char *calibfile);
@@ -143,10 +144,10 @@ class ALVAR_EXPORT Camera {
 
 	/** \brief Constructor */
 	Camera();
-	Camera(ros::NodeHandle & n, std::string cam_info_topic);
+	Camera(std::string cam_info_topic);
 
 	/** Sets the intrinsic calibration */
-	void SetCameraInfo(const sensor_msgs::CameraInfo& camInfo);
+	void SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info);
 
 	/** \brief Get x-direction FOV in radians */
 	double GetFovX() {
@@ -211,10 +212,10 @@ class ALVAR_EXPORT Camera {
 	void Undistort(PointDouble &point);
 
 	/** \brief Unapplys the lens distortion for one point on an image plane. */
-	void Undistort(CvPoint2D32f& point);
+	void Undistort(cv::Point2f& point);
 
 	/** \brief Applys the lens distortion for one point on an image plane. */
-	void Distort(CvPoint2D32f& point);
+	void Distort(cv::Point2f& point);
 
 	/** \brief Applys the lens distortion for points on image plane. 
*/ void Distort(std::vector& points); @@ -223,11 +224,11 @@ class ALVAR_EXPORT Camera { void Distort(PointDouble &point); /** \brief Calculate exterior orientation */ - void CalcExteriorOrientation(std::vector& pw, std::vector& pi, Pose *pose); + void CalcExteriorOrientation(std::vector& pw, std::vector& pi, Pose *pose); /** \brief Calculate exterior orientation */ - void CalcExteriorOrientation(std::vector& pw, std::vector& pi, + void CalcExteriorOrientation(std::vector& pw, std::vector& pi, CvMat *rodriques, CvMat *tra); /** \brief Calculate exterior orientation @@ -246,13 +247,13 @@ class ALVAR_EXPORT Camera { bool CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, CvMat *rodriques, CvMat *tra); /** \brief Project one point */ - void ProjectPoint(const CvPoint3D64f pw, const Pose *pose, CvPoint2D64f &pi) const; + void ProjectPoint(const cv::Point3f pw, const Pose *pose, cv::Point2f &pi) const; - /** \brief Project one point */ - void ProjectPoint(const CvPoint3D32f pw, const Pose *pose, CvPoint2D32f &pi) const; + // /** \brief Project one point */ + // void ProjectPoint(const cv::Point3f pw, const Pose *pose, cv::Point2f &pi) const; /** \brief Project points */ - void ProjectPoints(std::vector& pw, Pose *pose, std::vector& pi) const; + void ProjectPoints(std::vector& pw, Pose *pose, std::vector& pi) const; /** \brief Project points */ diff --git a/ar_track_alvar/include/ar_track_alvar/CvTestbed.h b/ar_track_alvar/include/ar_track_alvar/CvTestbed.h index 709605c..2451a3d 100644 --- a/ar_track_alvar/include/ar_track_alvar/CvTestbed.h +++ b/ar_track_alvar/include/ar_track_alvar/CvTestbed.h @@ -4,8 +4,10 @@ #include "Alvar.h" #include #include -#include "cv.h" -#include "highgui.h" +#include +#include +#include +#include #include "CaptureFactory.h" using namespace alvar; @@ -78,7 +80,7 @@ class CvTestbed { bool running; /** \brief Pointer for the user-defined videocallback. */ - void (*videocallback)(IplImage *image); + void (*videocallback)(cv::Mat *image); /** \brief Pointer for the user-defined KEYcallback. */ int (*keycallback)(int key); /** \brief The window title for the video view. */ @@ -87,18 +89,18 @@ class CvTestbed { std::string filename; /** \brief Image structure to store the images internally */ struct Image { - IplImage *ipl; + cv::Mat *ipl; std::string title; bool visible; bool release_at_exit; - Image(IplImage *_ipl, std::string _title, bool _visible, bool _release_at_exit) + Image(cv::Mat *_ipl, std::string _title, bool _visible, bool _release_at_exit) :ipl(_ipl),title(_title),visible(_visible),release_at_exit(_release_at_exit) {} }; /** \brief Vector of images stored internally */ std::vector images; /** \brief Video callback called for every frame. This calls user-defined videocallback if one exists. */ - static void default_videocallback(IplImage *image); + static void default_videocallback(cv::Mat*image); /** \brief \e WaitKeys contains the main loop. */ void WaitKeys(); /** \brief \e ShowVisibleImages is called from the videocallback. This shows the internally stored images which have the visible-flag on */ @@ -114,7 +116,7 @@ class CvTestbed { * \brief Set the videocallback function that will be called for * every frame. */ - void SetVideoCallback(void (*_videocallback)(IplImage *image)); + void SetVideoCallback(void (*_videocallback)(cv::Mat *image)); /** * \brief Sets the keyboard callback function that will be called * when keyboard is pressed. 
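With `Camera` now deriving from `rclcpp::Node`, the `sub_`/`camInfoCallback` pair declared above maps onto an ordinary rclcpp subscription. A minimal sketch of how the single-argument constructor could wire it up (member names follow the header; the node name and QoS depth are assumptions):

#include <functional>
#include <string>
#include <rclcpp/rclcpp.hpp>
#include <sensor_msgs/msg/camera_info.hpp>

class Camera : public rclcpp::Node
{
public:
  explicit Camera(const std::string& cam_info_topic)
  : rclcpp::Node("ar_track_alvar_camera"), cameraInfoTopic_(cam_info_topic)
  {
    sub_ = create_subscription<sensor_msgs::msg::CameraInfo>(
        cameraInfoTopic_, 1,
        std::bind(&Camera::camInfoCallback, this, std::placeholders::_1));
  }

private:
  void camInfoCallback(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info)
  {
    cam_info_ = *cam_info;  // cache intrinsics; the real class calls SetCameraInfo()
  }

  std::string cameraInfoTopic_;
  sensor_msgs::msg::CameraInfo cam_info_;
  rclcpp::Subscription<sensor_msgs::msg::CameraInfo>::SharedPtr sub_;
};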
@@ -140,21 +142,21 @@ class CvTestbed {
 	 * \param ipl The IplImage to be stored
 	 * \param release_at_exit Boolean indicating should \e CvTestbed automatically release the image at exit
 	 */
-	size_t SetImage(const char *title, IplImage *ipl, bool release_at_exit=false);
+	size_t SetImage(const char *title, cv::Mat *ipl, bool release_at_exit=false);
 	/**
 	 * \brief Creates an image with given size, depth and channels and stores
 	 * it with a given 'title' (see \e CvTestbed::SetImage)
 	 */
-	IplImage *CreateImage(const char *title, CvSize size, int depth, int channels);
+	cv::Mat *CreateImage(const char *title, cv::Size size, int type);
 	/**
 	 * \brief Creates an image based on the given prototype and stores
 	 * it with a given 'title' (see \e CvTestbed::SetImage)
 	 */
-	IplImage *CreateImageWithProto(const char *title, IplImage *proto, int depth=0, int channels=0);
+	cv::Mat *CreateImageWithProto(const char *title, cv::Mat *proto, int depth=0, int channels=0);
 	/**
 	 * \brief Get a pointer for the stored image based on index number
 	 */
-	IplImage *GetImage(size_t index);
+	cv::Mat *GetImage(size_t index);
 	/**
 	 * \brief Get an index number of the stored image based on title
 	 */
@@ -162,7 +164,7 @@ class CvTestbed {
 	/**
 	 * \brief Get a pointer for the stored image based on title
 	 */
-	IplImage *GetImage(const char *title);
+	cv::Mat *GetImage(const char *title);
 	/**
 	 * \brief Toggle the visibility of the stored image
 	 */
diff --git a/ar_track_alvar/include/ar_track_alvar/Draw.h b/ar_track_alvar/include/ar_track_alvar/Draw.h
index c2b7978..f7f66f5 100644
--- a/ar_track_alvar/include/ar_track_alvar/Draw.h
+++ b/ar_track_alvar/include/ar_track_alvar/Draw.h
@@ -46,12 +46,12 @@ namespace alvar {
  * \param label A label to show in the center of the bounding box.
  */
 template<class PointType>
-void inline DrawBB(IplImage *image, const std::vector<PointType>& points, CvScalar color, std::string label="")
+void inline DrawBB(cv::Mat *image, const std::vector<PointType>& points, CvScalar color, std::string label="")
 {
 	if (points.size() < 2) {
 		return;
 	}
-	PointType minimum = PointType(image->width, image->height);
+	PointType minimum = PointType(image->cols, image->rows);
 	PointType maximum = PointType(0, 0);
 	for (int i = 0; i < (int)points.size(); ++i) {
 		PointType current = points.at(i);
@@ -60,14 +60,12 @@ void inline DrawBB(IplImage *image, const std::vector<PointType>& points, CvScal
 		if (current.y < minimum.y) minimum.y = current.y;
 		if (current.y > maximum.y) maximum.y = current.y;
 	}
-	cvLine(image, cvPoint((int)minimum.x,(int)minimum.y), cvPoint((int)maximum.x,(int)minimum.y), color);
-	cvLine(image, cvPoint((int)maximum.x,(int)minimum.y), cvPoint((int)maximum.x,(int)maximum.y), color);
-	cvLine(image, cvPoint((int)maximum.x,(int)maximum.y), cvPoint((int)minimum.x,(int)maximum.y), color);
-	cvLine(image, cvPoint((int)minimum.x,(int)maximum.y), cvPoint((int)minimum.x,(int)minimum.y), color);
-	if (!label.empty()) {
-		CvFont font;
-		cvInitFont(&font, 0, 0.5, 0.5, 0);
-		cvPutText(image, label.c_str(), cvPoint((int)minimum.x+1,(int)minimum.y+2), &font, CV_RGB(255,255,0));
+	cv::line(*image, cv::Point((int)minimum.x,(int)minimum.y), cv::Point((int)maximum.x,(int)minimum.y), color);
+	cv::line(*image, cv::Point((int)maximum.x,(int)minimum.y), cv::Point((int)maximum.x,(int)maximum.y), color);
+	cv::line(*image, cv::Point((int)maximum.x,(int)maximum.y), cv::Point((int)minimum.x,(int)maximum.y), color);
+	cv::line(*image, cv::Point((int)minimum.x,(int)maximum.y), cv::Point((int)minimum.x,(int)minimum.y), color);
+	if (!label.empty()) {
+		cv::putText(*image, label.c_str(), cv::Point((int)minimum.x+1,(int)minimum.y+2), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(0,255,255));
 	}
 }
 
@@ -76,7 +76,7 @@ void inline DrawBB(IplImage *image, const std::vector<PointType>& points, CvScal
  * \param points Array of CvPoints to be visualzed.
  * \param color Use CV_RGB(red,green,blue) to determine the color.
  */
-void ALVAR_EXPORT DrawPoints(IplImage *image, const std::vector<CvPoint>& points, CvScalar color);
+void ALVAR_EXPORT DrawPoints(cv::Mat *image, const std::vector<CvPoint>& points, CvScalar color);
 
 /** \brief Draws lines between consecutive points stored in vector (polyline).
  * \param image Pointer to the destination image.
@@ -85,12 +85,12 @@
  * \param loop If true, the polyline is closed.
  */
 template<class PointType>
-void inline DrawLines(IplImage *image, const std::vector<PointType>& points, CvScalar color, bool loop=true)
+void inline DrawLines(cv::Mat *image, const std::vector<PointType>& points, CvScalar color, bool loop=true)
 {
 	for(unsigned i = 1; i < points.size(); ++i)
-		cvLine(image, cvPoint((int)points[i-1].x,(int)points[i-1].y), cvPoint((int)points[i].x,(int)points[i].y), color);
+		cv::line(*image, cv::Point((int)points[i-1].x,(int)points[i-1].y), cv::Point((int)points[i].x,(int)points[i].y), color);
 	if (loop) {
-		cvLine(image, cvPoint((int)points[points.size()-1].x,(int)points[points.size()-1].y), cvPoint((int)points[0].x,(int)points[0].y), color);
+		cv::line(*image, cv::Point((int)points[points.size()-1].x,(int)points[points.size()-1].y), cv::Point((int)points[0].x,(int)points[0].y), color);
 	}
 }
 
@@ -99,14 +99,14 @@
 * \param line Line struct to be drawn.
 * \param color Use CV_RGB(red,green,blue) to determine the color.
 */
-void ALVAR_EXPORT DrawLine(IplImage* image, const Line line, CvScalar color = CV_RGB(0,255,0));
+void ALVAR_EXPORT DrawLine(cv::Mat* image, const alvar::Line line, cv::Scalar color = cv::Scalar(0,255,0));
 
 /** \brief Draws points of the contour that is obtained by \e Labeling class.
 * \param image Pointer to the destination image.
 * \param contour Controur sequence.
 * \param color Use CV_RGB(red,green,blue) to determine the color.
 */
-void ALVAR_EXPORT DrawPoints(IplImage* image, const CvSeq* contour, CvScalar color = CV_RGB(255,0,0));
+void ALVAR_EXPORT DrawPoints(cv::Mat* image, const CvSeq* contour, cv::Scalar color = cv::Scalar(255,0,0));
 
 
 /** \brief Draws circles to the contour points that are obtained by \e Labeling class.
@@ -115,14 +115,14 @@ void ALVAR_EXPORT DrawPoints(IplImage* image, const CvSeq* contour, CvScalar col
 * \param radius Circle radius in pixels.
 * \param color Use CV_RGB(red,green,blue) to determine the color.
 */
-void ALVAR_EXPORT DrawCircles(IplImage* image, const CvSeq* contour, int radius, CvScalar color = CV_RGB(255,0,0));
+void ALVAR_EXPORT DrawCircles(cv::Mat* image, const CvSeq* contour, int radius, cv::Scalar color = cv::Scalar(255,0,0));
 
 /** \brief Draws lines between consecutive contour points.
 * \param image Pointer to the destination image.
 * \param contour Controur sequence.
 * \param color Use CV_RGB(red,green,blue) to determine the color.
 */
-void ALVAR_EXPORT DrawLines(IplImage* image, const CvSeq* contour, CvScalar color = CV_RGB(255,0,0));
+void ALVAR_EXPORT DrawLines(cv::Mat* image, const CvSeq* contour, cv::Scalar color = cv::Scalar(255,0,0));
 
 /** \brief Draws circles to the array of points.
 * \param image Pointer to the destination image. 
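Since the drawing helpers above now take `cv::Mat`, the four per-edge `cv::line` calls in `DrawBB` can also collapse into a single `cv::rectangle`; a minimal sketch of that simplification (the helper name is illustrative, not part of this header):

#include <string>
#include <opencv2/imgproc.hpp>

// Sketch: bounding box plus optional label with the C++ OpenCV API.
inline void drawLabeledBox(cv::Mat& image, const cv::Point& top_left,
                           const cv::Point& bottom_right,
                           const cv::Scalar& color, const std::string& label)
{
  cv::rectangle(image, top_left, bottom_right, color);
  if (!label.empty()) {
    cv::putText(image, label, top_left + cv::Point(1, 2),
                cv::FONT_HERSHEY_DUPLEX, 0.5, color);
  }
}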
@@ -131,10 +131,10 @@ void ALVAR_EXPORT DrawLines(IplImage* image, const CvSeq* contour, CvScalar colo * \param radius Circle radius in pixels. */ template -void inline DrawPoints(IplImage *image, const std::vector& points, CvScalar color, int radius=1) +void inline DrawPoints(cv::Mat *image, const std::vector& points, cv::Scalar color, int radius=1) { for(unsigned i = 0; i < points.size(); ++i) - cvCircle(image, cvPoint((int)points[i].x,(int)points[i].y), radius, color); + cv::circle(*image, cv::Point((int)points[i].x,(int)points[i].y), radius, color); } /** \brief Draws OpenCV ellipse. @@ -144,7 +144,7 @@ void inline DrawPoints(IplImage *image, const std::vector& points, Cv * \param fill If false, only the outline is drawn. * \param par The ellipse width and height are grown by \e par pixels. */ -void ALVAR_EXPORT DrawCVEllipse(IplImage* image, CvBox2D& ellipse, CvScalar color, bool fill=false, double par=0); +void ALVAR_EXPORT DrawCVEllipse(cv::Mat* image, CvBox2D& ellipse, cv::Scalar color, bool fill=false, double par=0); /** \brief This function is used to construct a texture image which is needed to hide a marker from the original video frame. See \e SampleMarkerHide.cpp for example implementation. * \param image Pointer to the original video frame from where the hiding texture is calculated. @@ -154,9 +154,9 @@ void ALVAR_EXPORT DrawCVEllipse(IplImage* image, CvBox2D& ellipse, CvScalar colo * \param topleft Top left limit of the texture area in marker coordinate frame. * \param botright Bottom right limit of the texture area in marker coordinate frame. */ -void ALVAR_EXPORT BuildHideTexture(IplImage *image, IplImage *hide_texture, - Camera *cam, double gl_modelview[16], - PointDouble topleft, PointDouble botright); +void ALVAR_EXPORT BuildHideTexture(cv::Mat *image, cv::Mat *hide_texture, + alvar::Camera *cam, double gl_modelview[16], + alvar::PointDouble topleft, alvar::PointDouble botright); /** \brief Draws the texture generated by \e BuildHideTexture to given video frame. For better performance, use OpenGL instead. See \e SampleMarkerHide.cpp for example implementation. * \param image Pointer to the destination image where the texture is drawn. @@ -166,10 +166,10 @@ void ALVAR_EXPORT BuildHideTexture(IplImage *image, IplImage *hide_texture, * \param topleft Top left limit of the texture area in marker coordinate frame. * \param botright Bottom right limit of the texture area in marker coordinate frame. 
 */
-void ALVAR_EXPORT DrawTexture(IplImage *image, IplImage *texture,
-					 Camera *cam, double gl_modelview[16],
-					 PointDouble topleft, PointDouble botright);
-
-} // namespace alvar
+void ALVAR_EXPORT DrawTexture(cv::Mat *image, cv::Mat *texture,
+					 alvar::Camera *cam, double gl_modelview[16],
+					 alvar::PointDouble topleft, alvar::PointDouble botright);
+
+} // namespace alvar
 
 #endif
diff --git a/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h b/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h
index 0fd66f4..c5e242d 100644
--- a/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h
+++ b/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h
@@ -32,7 +32,9 @@
  */
 
 #include "Alvar.h"
-#include "cv.h"
+#include
+#include
+#include
 #include "tinyxml.h"
 
 namespace alvar {
diff --git a/ar_track_alvar/include/ar_track_alvar/Kalman.h b/ar_track_alvar/include/ar_track_alvar/Kalman.h
index cfd3a09..e45f4b8 100644
--- a/ar_track_alvar/include/ar_track_alvar/Kalman.h
+++ b/ar_track_alvar/include/ar_track_alvar/Kalman.h
@@ -31,7 +31,11 @@
  */
 
 #include "Alvar.h"
-#include "cxcore.h"
+#include
+#include "opencv2/core/types_c.h"
+#include
+#include
+
 
 namespace alvar {
@@ -279,11 +283,11 @@ class ALVAR_EXPORT KalmanVisualize {
 	Kalman *kalman_ext;
 	KalmanSensor *sensor_ext;
 	/** \brief Image collecting visualization of the Kalman filter */
-	IplImage *img;
+	cv::Mat *img;
 	/** \brief Image to show */
-	IplImage *img_legend;
+	cv::Mat *img_legend;
 	/** \brief Image to show */
-	IplImage *img_show;
+	cv::Mat *img_show;
 	/** \brief visualization scale before show */
 	int img_scale;
 	/** \brief Add matrix to the image */
diff --git a/ar_track_alvar/include/ar_track_alvar/Line.h b/ar_track_alvar/include/ar_track_alvar/Line.h
index d93ac4d..a9b1f07 100644
--- a/ar_track_alvar/include/ar_track_alvar/Line.h
+++ b/ar_track_alvar/include/ar_track_alvar/Line.h
@@ -44,7 +44,7 @@ struct ALVAR_EXPORT Line
 	 * \brief Constructor.
 	 * \param params params[0] and params[1] are the x and y components of the direction vector, params[2] and params[3] are the x and y coordinates of the line center.
 	 */
-	Line(float params[4]);
+	Line(cv::Vec4f params);
 
 	Line() {}
 
@@ -68,7 +68,7 @@ struct ALVAR_EXPORT Line
 int ALVAR_EXPORT FitLines(std::vector<Line> &lines,
 				const std::vector<int>& corners,
 				const std::vector<PointInt>& edge,
-				IplImage *grey=0);
+				cv::Mat *grey=0);
 
 /**
 * \brief Calculates an intersection point of two lines. 
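The new `Line(cv::Vec4f)` constructor above matches the output layout of `cv::fitLine`, which is presumably what the ported `FitLines` feeds it. A minimal sketch of that hand-off (the wrapper function is illustrative):

#include <vector>
#include <opencv2/imgproc.hpp>

// Sketch: fit a 2D line; the resulting Vec4f is (vx, vy, x0, y0), a unit
// direction vector followed by a point on the line, exactly as the
// Line constructor's documentation above describes.
cv::Vec4f fitSegment(const std::vector<cv::Point2f>& pts)
{
  cv::Vec4f params;
  cv::fitLine(pts, params, cv::DIST_L2, 0, 0.01, 0.01);
  return params;  // ready for alvar::Line(params)
}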
diff --git a/ar_track_alvar/include/ar_track_alvar/Optimization.h b/ar_track_alvar/include/ar_track_alvar/Optimization.h index 4699329..9ed2a19 100644 --- a/ar_track_alvar/include/ar_track_alvar/Optimization.h +++ b/ar_track_alvar/include/ar_track_alvar/Optimization.h @@ -25,7 +25,9 @@ #define OPTIMIZATION_H #include "Alvar.h" -#include +#include +#include +#include //#include diff --git a/ar_track_alvar/include/ar_track_alvar/Pose.h b/ar_track_alvar/include/ar_track_alvar/Pose.h index f92f5c0..ec51455 100644 --- a/ar_track_alvar/include/ar_track_alvar/Pose.h +++ b/ar_track_alvar/include/ar_track_alvar/Pose.h @@ -26,6 +26,7 @@ #include "Alvar.h" #include "Rotation.h" +#include "opencv2/core/types_c.h" /** * \file Pose.h diff --git a/ar_track_alvar/include/ar_track_alvar/Rotation.h b/ar_track_alvar/include/ar_track_alvar/Rotation.h index 62bde79..1798571 100644 --- a/ar_track_alvar/include/ar_track_alvar/Rotation.h +++ b/ar_track_alvar/include/ar_track_alvar/Rotation.h @@ -34,6 +34,7 @@ #include "Alvar.h" #include "Util.h" +#include "opencv2/core/types_c.h" namespace alvar { diff --git a/ar_track_alvar/include/ar_track_alvar/Util.h b/ar_track_alvar/include/ar_track_alvar/Util.h index 6438724..4bca830 100644 --- a/ar_track_alvar/include/ar_track_alvar/Util.h +++ b/ar_track_alvar/include/ar_track_alvar/Util.h @@ -37,8 +37,11 @@ #include #include #include -#include -#include +#include +#include +#include +#include +#include "opencv2/core/types_c.h" #include //for abs #include #include //Compatibility with OpenCV 3.x @@ -178,7 +181,7 @@ double ALVAR_EXPORT polyLinePointDist(CvPoint *PointList, int nPnts,CvPoint *C, * \param points Vector of points on the ellipse edge. * \param ellipse_box OpenCV struct for the fitted ellipse. */ -void ALVAR_EXPORT FitCVEllipse(const std::vector &points, CvBox2D& ellipse_box); +void ALVAR_EXPORT FitCVEllipse(const std::vector &points, cv::RotatedRect& ellipse_box); int ALVAR_EXPORT exp_filt2(std::vector &v,std:: vector &ret, bool clamp); diff --git a/ar_track_alvar/include/ar_track_alvar/filter/kinect_filtering.h b/ar_track_alvar/include/ar_track_alvar/filter/kinect_filtering.h index 6d19ca1..730377c 100644 --- a/ar_track_alvar/include/ar_track_alvar/filter/kinect_filtering.h +++ b/ar_track_alvar/include/ar_track_alvar/filter/kinect_filtering.h @@ -44,21 +44,21 @@ #include #include -#include -#include +#include +#include "rclcpp/rclcpp.hpp" #include #include #include #include #include -#include +// #include #include #include #include #include -#include +#include namespace ar_track_alvar { @@ -91,18 +91,18 @@ int extractOrientation (const pcl::ModelCoefficients& coeffs, const ARPoint& p1, const ARPoint& p2, const ARPoint& p3, const ARPoint& p4, - geometry_msgs::Quaternion &retQ); + geometry_msgs::msg::Quaternion &retQ); // Like extractOrientation except return value is a btMatrix3x3 int extractFrame (const pcl::ModelCoefficients& coeffs, const ARPoint& p1, const ARPoint& p2, const ARPoint& p3, const ARPoint& p4, - tf::Matrix3x3 &retmat); + tf2::Matrix3x3 &retmat); // Return the centroid (mean) of a point cloud -geometry_msgs::Point centroid (const ARCloud& points); +geometry_msgs::msg::Point centroid (const ARCloud& points); } // namespace #endif // include guard diff --git a/ar_track_alvar/nodes/.vscode/c_cpp_properties.json b/ar_track_alvar/nodes/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..b6f86aa --- /dev/null +++ b/ar_track_alvar/nodes/.vscode/c_cpp_properties.json @@ -0,0 +1,17 @@ +{ + "configurations": [ + { + "browse": { + 
"databaseFilename": "", + "limitSymbolsToIncludedHeaders": true + }, + "includePath": [ + "/opt/ros/foxy/include/**", + "include/**", + "/usr/include/**" + ], + "name": "ROS" + } + ], + "version": 4 +} \ No newline at end of file diff --git a/ar_track_alvar/nodes/.vscode/settings.json b/ar_track_alvar/nodes/.vscode/settings.json new file mode 100644 index 0000000..6b40311 --- /dev/null +++ b/ar_track_alvar/nodes/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + "python.autoComplete.extraPaths": [ + "/opt/ros/foxy/lib/python3.8/site-packages" + ] +} \ No newline at end of file diff --git a/ar_track_alvar/nodes/FindMarkerBundles.cpp b/ar_track_alvar/nodes/FindMarkerBundles.cpp index e7f83e0..6397d9e 100644 --- a/ar_track_alvar/nodes/FindMarkerBundles.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundles.cpp @@ -39,20 +39,21 @@ #include "ar_track_alvar/MultiMarkerInitializer.h" #include "ar_track_alvar/Shared.h" #include -#include -#include -#include +#include +#include +#include "tf2_ros/buffer.h" +#include "tf2_ros/transform_listener.h" #include -#include +#include #include #include #include #include -#include -#include -#include +#include +#include +#include "rclcpp/rclcpp.hpp" #include #include #include @@ -85,12 +86,16 @@ using boost::make_shared; Camera *cam; cv_bridge::CvImagePtr cv_ptr_; image_transport::Subscriber cam_sub_; -ros::Subscriber cloud_sub_; -ros::Publisher arMarkerPub_; -ros::Publisher rvizMarkerPub_; -ros::Publisher rvizMarkerPub2_; -ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -tf::TransformListener *tf_listener; + +rclcpp::Publisher::SharedPtr arMarkerPub_; +rclcpp::Publisher::SharedPtr rvizMarkerPub_; +rclcpp::Publisher::SharedPtr rvizMarkerPub2_; + + +rclcpp::Subscription::SharedPtr cloud_sub_; + +ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; +tf2_ros::TransformListener *tf_listener; tf::TransformBroadcaster *tf_broadcaster; MarkerDetector marker_detector; MultiMarkerBundle **multi_marker_bundles=NULL; @@ -115,10 +120,10 @@ int n_bundles = 0; //Debugging utility function void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double rad) { - visualization_msgs::Marker rvizMarker; + visualization_msgs::msg::Marker rvizMarker; rvizMarker.header.frame_id = frame; - rvizMarker.header.stamp = ros::Time::now(); + rvizMarker.header.stamp = rclcpp::Time::now(); rvizMarker.id = id; rvizMarker.ns = "3dpts"; @@ -126,8 +131,8 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra rvizMarker.scale.y = rad; rvizMarker.scale.z = rad; - rvizMarker.type = visualization_msgs::Marker::SPHERE_LIST; - rvizMarker.action = visualization_msgs::Marker::ADD; + rvizMarker.type = visualization_msgs::msg::Marker::SPHERE_LIST; + rvizMarker.action = visualization_msgs::msg::Marker::ADD; if(color==1){ rvizMarker.color.r = 0.0f; @@ -156,17 +161,17 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra rvizMarker.points.push_back(p); } - rvizMarker.lifetime = ros::Duration (1.0); + rvizMarker.lifetime = rclcpp::Duration (1.0); rvizMarkerPub2_.publish (rvizMarker); } void drawArrow(gm::Point start, tf::Matrix3x3 mat, string frame, int color, int id) { - visualization_msgs::Marker rvizMarker; + visualization_msgs::msg::Marker rvizMarker; rvizMarker.header.frame_id = frame; - rvizMarker.header.stamp = ros::Time::now(); + rvizMarker.header.stamp = rclcpp::Time::now(); rvizMarker.id = id; rvizMarker.ns = "arrow"; @@ -174,8 +179,8 @@ void drawArrow(gm::Point start, tf::Matrix3x3 mat, string frame, int color, int rvizMarker.scale.y = 
0.01; rvizMarker.scale.z = 0.1; - rvizMarker.type = visualization_msgs::Marker::ARROW; - rvizMarker.action = visualization_msgs::Marker::ADD; + rvizMarker.type = visualization_msgs::msg::Marker::ARROW; + rvizMarker.action = visualization_msgs::msg::Marker::ADD; for(int i=0; i<3; i++){ rvizMarker.points.clear(); @@ -186,7 +191,7 @@ void drawArrow(gm::Point start, tf::Matrix3x3 mat, string frame, int color, int end.z = start.z + mat[2][i]; rvizMarker.points.push_back(end); rvizMarker.id += 10*i; - rvizMarker.lifetime = ros::Duration (1.0); + rvizMarker.lifetime = rclcpp::Duration (1.0); if(color==1){ rvizMarker.color.r = 1.0f; @@ -261,11 +266,11 @@ int InferCorners(const ARCloud &cloud, MultiMarkerBundle &master, ARCloud &bund_ p.point.z = corner_coord.z()/100.0; try{ - tf_listener->waitForTransform(cloud.header.frame_id, marker_frame, ros::Time(0), ros::Duration(0.1)); + tf_listener->waitForTransform(cloud.header.frame_id, marker_frame, rclcpp::Time(0), rclcpp::Duration(0.1)); tf_listener->transformPoint(cloud.header.frame_id, p, output_p); } catch (tf::TransformException ex){ - ROS_ERROR("ERROR InferCorners: %s",ex.what()); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "ERROR InferCorners: %s",ex.what()); return -1; } @@ -424,7 +429,7 @@ void GetMultiMarkerPoses(IplImage *image, ARCloud &cloud) { } } else - ROS_ERROR("FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id); //Check if we have spotted a master tag int master_ind = -1; @@ -501,7 +506,7 @@ void GetMultiMarkerPoses(IplImage *image, ARCloud &cloud) { // Given the pose of a marker, builds the appropriate ROS messages for later publishing -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_msg, tf::StampedTransform &CamToOutput, visualization_msgs::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker, int confidence){ +void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker, int confidence){ double px,py,pz,qx,qy,qz,qw; px = p.translation[0]/100.0; @@ -527,7 +532,7 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ out << id; std::string id_string = out.str(); markerFrame += id_string; - tf::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); + tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); tf_broadcaster->sendTransform(camToMarker); //Create the rviz visualization message @@ -546,8 +551,8 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ rvizMarker->ns = "basic_shapes"; - rvizMarker->type = visualization_msgs::Marker::CUBE; - rvizMarker->action = visualization_msgs::Marker::ADD; + rvizMarker->type = visualization_msgs::msg::Marker::CUBE; + rvizMarker->action = visualization_msgs::msg::Marker::ADD; //Determine a color and opacity, based on marker type if(type==MAIN_MARKER){ @@ -569,7 +574,7 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ rvizMarker->color.a = 0.5; } - rvizMarker->lifetime = ros::Duration (0.1); + rvizMarker->lifetime = rclcpp::Duration (0.1); // Only publish the pose of the master tag in each bundle, since that's all we really care about aside from 
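`tf2_ros::TransformListener` has no `waitForTransform` or `transformPoint`; in tf2 the listener only feeds a `tf2_ros::Buffer`, the timeout moves into `Buffer::lookupTransform`, and point transforms go through `tf2::doTransform`. A minimal sketch of the lookup that `InferCorners` needs (buffer ownership is assumed to sit in the node):

#include <string>
#include <geometry_msgs/msg/point_stamped.hpp>
#include <tf2/time.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <tf2_ros/buffer.h>

geometry_msgs::msg::PointStamped toTargetFrame(
    tf2_ros::Buffer& buffer, const geometry_msgs::msg::PointStamped& p,
    const std::string& target_frame)
{
  // lookupTransform blocks up to the timeout, replacing waitForTransform().
  const auto tf = buffer.lookupTransform(
      target_frame, p.header.frame_id, tf2::TimePointZero,
      tf2::durationFromSec(0.1));
  geometry_msgs::msg::PointStamped out;
  tf2::doTransform(p, out, tf);  // replaces TransformListener::transformPoint()
  return out;
}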
visualization if(type==MAIN_MARKER){ @@ -590,26 +595,26 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ //Callback to handle getting kinect point clouds and processing them -void getPointCloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg) +void getPointCloudCallback (const sensor_msgs::msg::PointCloud2::ConstSharedPtr &msg) { - sensor_msgs::ImagePtr image_msg(new sensor_msgs::Image); + sensor_msgs::msg::ImagePtr image_msg(new sensor_msgs::msg::Image); //If we've already gotten the cam info, then go ahead if(cam->getCamInfo_){ try{ //Get the transformation from the Camera to the output frame for this image capture - tf::StampedTransform CamToOutput; + tf2::StampedTransform CamToOutput; try{ - tf_listener->waitForTransform(output_frame, msg->header.frame_id, msg->header.stamp, ros::Duration(1.0)); + tf_listener->waitForTransform(output_frame, msg->header.frame_id, msg->header.stamp, rclcpp::Duration(1.0)); tf_listener->lookupTransform(output_frame, msg->header.frame_id, msg->header.stamp, CamToOutput); } catch (tf::TransformException ex){ - ROS_ERROR("%s",ex.what()); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); } //Init and clear visualization markers - visualization_msgs::Marker rvizMarker; - ar_track_alvar_msgs::AlvarMarker ar_pose_marker; + visualization_msgs::msg::Marker rvizMarker; + ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; arPoseMarkers_.markers.clear (); //Convert cloud to PCL @@ -768,8 +773,9 @@ int calcAndSaveMasterCoords(MultiMarkerBundle &master) int main(int argc, char *argv[]) { - ros::init (argc, argv, "marker_detect"); - ros::NodeHandle n; + + rclcpp::init(argc, argv); + auto node = rclcpp::Node::make_shared("marker_detect"); if(argc < 9){ std::cout << std::endl; @@ -823,21 +829,24 @@ int main(int argc, char *argv[]) // Set up camera, listeners, and broadcasters cam = new Camera(n, cam_info_topic); - tf_listener = new tf::TransformListener(n); + tf_listener = new tf2_ros::TransformListener(n); tf_broadcaster = new tf::TransformBroadcaster(); - arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::Marker > ("visualization_marker", 0); - rvizMarkerPub2_ = n.advertise < visualization_msgs::Marker > ("ARmarker_points", 0); + + arMarkerPub_ = node->create_publisher ("ar_pose_marker", 0); + rvizMarkerPub_ = node->create_publisher ("visualization_marker", 0); + rvizMarkerPub2_ = node->create_publisher ("ARmarker_points", 0); //Give tf a chance to catch up before the camera callback starts asking for transforms - ros::Duration(1.0).sleep(); - ros::spinOnce(); + rclcpp::Duration(1.0).sleep(); + rclcpp::spin_some(node); //Subscribe to topics and set up callbacks ROS_INFO ("Subscribing to image topic"); - cloud_sub_ = n.subscribe(cam_image_topic, 1, &getPointCloudCallback); - ros::spin(); + cloud_sub_ = this->create_subscription(cam_image_topic, 1, getPointCloudCallback); + + rclcpp::spin(node); + rclcpp::shutdown(); return 0; } diff --git a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp index 013588b..bc0ca94 100644 --- a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp @@ -43,7 +43,8 @@ #include #include #include -#include +#include "tf2_ros/buffer.h" +#include "tf2_ros/transform_listener.h" #include #include @@ -60,7 +61,7 @@ image_transport::Subscriber cam_sub_; ros::Publisher arMarkerPub_; ros::Publisher 
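`main` in FindMarkerBundles above still mixes idioms: `new Camera(n, cam_info_topic)` no longer matches the one-argument constructor, `tf2_ros::TransformListener` is built from a buffer rather than a node handle, and `this->create_subscription(...)` cannot appear in a free `main()`. A minimal sketch of the setup the surrounding code implies (topic names and QoS depths are assumptions):

#include <memory>
#include <rclcpp/rclcpp.hpp>
#include <sensor_msgs/msg/point_cloud2.hpp>
#include <tf2_ros/buffer.h>
#include <tf2_ros/transform_listener.h>
#include <visualization_msgs/msg/marker.hpp>

int main(int argc, char* argv[])
{
  rclcpp::init(argc, argv);
  auto node = rclcpp::Node::make_shared("marker_detect");

  auto tf_buffer = std::make_shared<tf2_ros::Buffer>(node->get_clock());
  tf2_ros::TransformListener tf_listener(*tf_buffer);  // fills the buffer

  auto rviz_pub = node->create_publisher<visualization_msgs::msg::Marker>(
      "visualization_marker", 10);

  auto cloud_sub = node->create_subscription<sensor_msgs::msg::PointCloud2>(
      "camera_cloud", 1,
      [](sensor_msgs::msg::PointCloud2::SharedPtr msg) {
        (void)msg;  // marker detection happens here
      });

  rclcpp::spin(node);
  rclcpp::shutdown();
  return 0;
}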
rvizMarkerPub_; ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -tf::TransformListener *tf_listener; +tf2_ros::TransformListener *tf_listener; tf::TransformBroadcaster *tf_broadcaster; MarkerDetector marker_detector; MultiMarkerBundle **multi_marker_bundles=NULL; @@ -79,8 +80,8 @@ std::string output_frame; int n_bundles = 0; void GetMultiMarkerPoses(IplImage *image); -void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg); -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_msg, tf::StampedTransform &CamToOutput, visualization_msgs::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker); +void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg); +void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker); // Updates the bundlePoses of the multi_marker_bundles by detecting markers and using all markers in a bundle to infer the master tag's position @@ -101,7 +102,7 @@ void GetMultiMarkerPoses(IplImage *image) { // Given the pose of a marker, builds the appropriate ROS messages for later publishing -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_msg, tf::StampedTransform &CamToOutput, visualization_msgs::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker){ +void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker){ double px,py,pz,qx,qy,qz,qw; px = p.translation[0]/100.0; @@ -128,7 +129,7 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ out << id; std::string id_string = out.str(); markerFrame += id_string; - tf::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); + tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); tf_broadcaster->sendTransform(camToMarker); } @@ -148,8 +149,8 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ rvizMarker->ns = "basic_shapes"; - rvizMarker->type = visualization_msgs::Marker::CUBE; - rvizMarker->action = visualization_msgs::Marker::ADD; + rvizMarker->type = visualization_msgs::msg::Marker::CUBE; + rvizMarker->action = visualization_msgs::msg::Marker::ADD; //Determine a color and opacity, based on marker type if(type==MAIN_MARKER){ @@ -171,7 +172,7 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ rvizMarker->color.a = 0.5; } - rvizMarker->lifetime = ros::Duration (1.0); + rvizMarker->lifetime = rclcpp::Duration (1.0); // Only publish the pose of the master tag in each bundle, since that's all we really care about aside from visualization if(type==MAIN_MARKER){ @@ -190,22 +191,22 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ //Callback to handle getting video frames and processing them -void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) +void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg) { //If we've already gotten the cam info, then go ahead if(cam->getCamInfo_){ try{ //Get the transformation from the Camera to the output frame for this image capture - tf::StampedTransform CamToOutput; + 
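tf2 has no `tf2::StampedTransform` type, so the `camToMarker` broadcasts in `makeMarkerMsgs` cannot port one-to-one; the ROS 2 idiom is a `geometry_msgs::msg::TransformStamped` pushed through `tf2_ros::TransformBroadcaster`. A minimal sketch (the helper name and argument list are illustrative):

#include <string>
#include <geometry_msgs/msg/transform_stamped.hpp>
#include <rclcpp/rclcpp.hpp>
#include <tf2/LinearMath/Quaternion.h>
#include <tf2_ros/transform_broadcaster.h>

void broadcastCamToMarker(tf2_ros::TransformBroadcaster& br,
                          const rclcpp::Time& stamp, const std::string& parent,
                          const std::string& child, const tf2::Quaternion& q,
                          double px, double py, double pz)
{
  geometry_msgs::msg::TransformStamped t;
  t.header.stamp = stamp;
  t.header.frame_id = parent;     // camera frame
  t.child_frame_id = child;       // e.g. "ar_marker_3"
  t.transform.translation.x = px;
  t.transform.translation.y = py;
  t.transform.translation.z = pz;
  t.transform.rotation.x = q.x();
  t.transform.rotation.y = q.y();
  t.transform.rotation.z = q.z();
  t.transform.rotation.w = q.w();
  br.sendTransform(t);
}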
tf2::StampedTransform CamToOutput; try{ - tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, ros::Duration(1.0)); - tf_listener->lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, CamToOutput); + tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); + tf_listener->lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, CamToOutput); } catch (tf::TransformException ex){ - ROS_ERROR("%s",ex.what()); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); } - visualization_msgs::Marker rvizMarker; + visualization_msgs::msg::Marker rvizMarker; ar_track_alvar_msgs::AlvarMarker ar_pose_marker; arPoseMarkers_.markers.clear (); @@ -279,7 +280,7 @@ void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) int main(int argc, char *argv[]) { ros::init (argc, argv, "marker_detect"); - ros::NodeHandle n; + rclcpp::Node n; if(argc < 8){ std::cout << std::endl; @@ -325,21 +326,21 @@ int main(int argc, char *argv[]) // Set up camera, listeners, and broadcasters cam = new Camera(n, cam_info_topic); - tf_listener = new tf::TransformListener(n); + tf_listener = new tf2_ros::TransformListener(n); tf_broadcaster = new tf::TransformBroadcaster(); arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::Marker > ("visualization_marker", 0); + rvizMarkerPub_ = n.advertise < visualization_msgs::msg::Marker > ("visualization_marker", 0); //Give tf a chance to catch up before the camera callback starts asking for transforms - ros::Duration(1.0).sleep(); - ros::spinOnce(); + rclcpp::Duration(1.0).sleep(); + rclcpp::spin_some(node); //Subscribe to topics and set up callbacks ROS_INFO ("Subscribing to image topic"); image_transport::ImageTransport it_(n); cam_sub_ = it_.subscribe (cam_image_topic, 1, &getCapCallback); - ros::spin(); + rclcpp::spin(node); return 0; } diff --git a/ar_track_alvar/nodes/IndividualMarkers.cpp b/ar_track_alvar/nodes/IndividualMarkers.cpp index a44a23f..80d1940 100644 --- a/ar_track_alvar/nodes/IndividualMarkers.cpp +++ b/ar_track_alvar/nodes/IndividualMarkers.cpp @@ -41,7 +41,8 @@ #include #include #include -#include +#include "tf2_ros/buffer.h" +#include "tf2_ros/transform_listener.h" #include #include #include @@ -68,8 +69,8 @@ ros::Publisher arMarkerPub_; ros::Publisher rvizMarkerPub_; ros::Publisher rvizMarkerPub2_; ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -visualization_msgs::Marker rvizMarker_; -tf::TransformListener *tf_listener; +visualization_msgs::msg::Marker rvizMarker_; +tf2_ros::TransformListener *tf_listener; tf::TransformBroadcaster *tf_broadcaster; MarkerDetector marker_detector; @@ -90,10 +91,10 @@ int marker_margin = 2; // default marker margin //Debugging utility function void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double rad) { - visualization_msgs::Marker rvizMarker; + visualization_msgs::msg::Marker rvizMarker; rvizMarker.header.frame_id = frame; - rvizMarker.header.stamp = ros::Time::now(); + rvizMarker.header.stamp = rclcpp::Time::now(); rvizMarker.id = id; rvizMarker.ns = "3dpts"; @@ -101,8 +102,8 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra rvizMarker.scale.y = rad; rvizMarker.scale.z = rad; - rvizMarker.type = visualization_msgs::Marker::SPHERE_LIST; - rvizMarker.action = visualization_msgs::Marker::ADD; + 
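`rclcpp::Node n;` above does not compile (a node needs a name, and later calls expect a shared pointer named `node`), and image subscriptions now hang off `image_transport` built from that pointer. A minimal sketch of the NoKinect entry point (the topic string stands in for the value parsed from arguments):

#include <image_transport/image_transport.hpp>
#include <rclcpp/rclcpp.hpp>
#include <sensor_msgs/msg/image.hpp>

void getCapCallback(const sensor_msgs::msg::Image::ConstSharedPtr& image_msg)
{
  (void)image_msg;  // marker detection happens here
}

int main(int argc, char* argv[])
{
  rclcpp::init(argc, argv);
  auto node = rclcpp::Node::make_shared("marker_detect");

  image_transport::ImageTransport it(node);
  image_transport::Subscriber cam_sub =
      it.subscribe("cam_image_topic", 1, &getCapCallback);

  rclcpp::spin(node);
  rclcpp::shutdown();
  return 0;
}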
rvizMarker.type = visualization_msgs::msg::Marker::SPHERE_LIST; + rvizMarker.action = visualization_msgs::msg::Marker::ADD; if(color==1){ rvizMarker.color.r = 0.0f; @@ -131,17 +132,17 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra rvizMarker.points.push_back(p); } - rvizMarker.lifetime = ros::Duration (1.0); + rvizMarker.lifetime = rclcpp::Duration (1.0); rvizMarkerPub2_.publish (rvizMarker); } void drawArrow(gm::Point start, tf::Matrix3x3 mat, string frame, int color, int id) { - visualization_msgs::Marker rvizMarker; + visualization_msgs::msg::Marker rvizMarker; rvizMarker.header.frame_id = frame; - rvizMarker.header.stamp = ros::Time::now(); + rvizMarker.header.stamp = rclcpp::Time::now(); rvizMarker.id = id; rvizMarker.ns = "arrow"; @@ -149,8 +150,8 @@ void drawArrow(gm::Point start, tf::Matrix3x3 mat, string frame, int color, int rvizMarker.scale.y = 0.01; rvizMarker.scale.z = 0.1; - rvizMarker.type = visualization_msgs::Marker::ARROW; - rvizMarker.action = visualization_msgs::Marker::ADD; + rvizMarker.type = visualization_msgs::msg::Marker::ARROW; + rvizMarker.action = visualization_msgs::msg::Marker::ADD; for(int i=0; i<3; i++){ rvizMarker.points.clear(); @@ -161,7 +162,7 @@ void drawArrow(gm::Point start, tf::Matrix3x3 mat, string frame, int color, int end.z = start.z + mat[2][i]; rvizMarker.points.push_back(end); rvizMarker.id += 10*i; - rvizMarker.lifetime = ros::Duration (1.0); + rvizMarker.lifetime = rclcpp::Duration (1.0); if(color==1){ rvizMarker.color.r = 1.0f; @@ -302,7 +303,7 @@ void GetMarkerPoses(IplImage *image, ARCloud &cloud) { } } else - ROS_ERROR("FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id); //Get the 3D marker points BOOST_FOREACH (const PointDouble& p, m->ros_marker_points_img) @@ -317,9 +318,9 @@ void GetMarkerPoses(IplImage *image, ARCloud &cloud) { -void getPointCloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg) +void getPointCloudCallback (const sensor_msgs::msg::PointCloud2::ConstSharedPtr &msg) { - sensor_msgs::ImagePtr image_msg(new sensor_msgs::Image); + sensor_msgs::msg::ImagePtr image_msg(new sensor_msgs::msg::Image); // If desired, use the frame in the message's header. 
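`getPointCloudCallback` needs the incoming `PointCloud2` twice: as a PCL cloud for the 3D pose math and as an RGB image for the 2D detector; `pcl_conversions` and `cv_bridge` cover both halves in ROS 2 just as they did in ROS 1. A minimal sketch (the `ARCloud` alias mirrors this package's convention):

#include <cv_bridge/cv_bridge.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl_conversions/pcl_conversions.h>
#include <sensor_msgs/msg/point_cloud2.hpp>

using ARCloud = pcl::PointCloud<pcl::PointXYZRGB>;

void handleCloud(const sensor_msgs::msg::PointCloud2::ConstSharedPtr& msg)
{
  // PointCloud2 -> PCL cloud for pose estimation.
  ARCloud cloud;
  pcl::fromROSMsg(*msg, cloud);

  // PointCloud2 -> Image -> cv::Mat for the marker detector.
  sensor_msgs::msg::Image image_msg;
  pcl::toROSMsg(*msg, image_msg);  // pulls out the RGB layer
  cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(image_msg, "bgr8");
  (void)cv_ptr;  // cv_ptr->image feeds marker_detector
}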
if (output_frame_from_msg) @@ -352,17 +353,17 @@ void getPointCloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg) Pose ret_pose; GetMarkerPoses(&ipl_image, cloud); - tf::StampedTransform CamToOutput; + tf2::StampedTransform CamToOutput; if (image_msg->header.frame_id == output_frame) { CamToOutput.setIdentity(); } else { try { tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, - image_msg->header.stamp, ros::Duration(1.0)); + image_msg->header.stamp, rclcpp::Duration(1.0)); tf_listener->lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, CamToOutput); } catch (tf::TransformException ex) { - ROS_ERROR("%s",ex.what()); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); } } @@ -397,7 +398,7 @@ void getPointCloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg) out << id; std::string id_string = out.str(); markerFrame += id_string; - tf::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); + tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); tf_broadcaster->sendTransform(camToMarker); //Create the rviz visualization messages @@ -410,8 +411,8 @@ void getPointCloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg) rvizMarker_.scale.y = 1.0 * marker_size/100.0; rvizMarker_.scale.z = 0.2 * marker_size/100.0; rvizMarker_.ns = "basic_shapes"; - rvizMarker_.type = visualization_msgs::Marker::CUBE; - rvizMarker_.action = visualization_msgs::Marker::ADD; + rvizMarker_.type = visualization_msgs::msg::Marker::CUBE; + rvizMarker_.action = visualization_msgs::msg::Marker::ADD; switch (id) { case 0: @@ -451,7 +452,7 @@ void getPointCloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg) rvizMarker_.color.a = 1.0; break; } - rvizMarker_.lifetime = ros::Duration (1.0); + rvizMarker_.lifetime = rclcpp::Duration (1.0); rvizMarkerPub_.publish (rvizMarker_); //Get the pose of the tag in the camera frame, then the output frame (usually torso) @@ -476,7 +477,7 @@ void getPointCloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg) void configCallback(ar_track_alvar::ParamsConfig &config, uint32_t level) { - ROS_INFO("AR tracker reconfigured: %s %.2f %.2f %.2f %.2f", config.enabled ? "ENABLED" : "DISABLED", + RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "AR tracker reconfigured: %s %.2f %.2f %.2f %.2f", config.enabled ? "ENABLED" : "DISABLED", config.max_frequency, config.marker_size, config.max_new_marker_error, config.max_track_error); enableSwitched = enabled != config.enabled; @@ -491,10 +492,10 @@ void configCallback(ar_track_alvar::ParamsConfig &config, uint32_t level) int main(int argc, char *argv[]) { ros::init (argc, argv, "marker_detect"); - ros::NodeHandle n, pn("~"); + rclcpp::Node n, pn("~"); if(argc > 1) { - ROS_WARN("Command line arguments are deprecated. Consider using ROS parameters and remappings."); + RCLCPP_WARN(rclcpp::get_logger("ArTrackAlvar"), "Command line arguments are deprecated. 
Consider using ROS parameters and remappings."); if(argc < 7){ std::cout << std::endl; @@ -534,7 +535,7 @@ int main(int argc, char *argv[]) pn.param("output_frame_from_msg", output_frame_from_msg, false); if (!output_frame_from_msg && !pn.getParam("output_frame", output_frame)) { - ROS_ERROR("Param 'output_frame' has to be set if the output frame is not " + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "Param 'output_frame' has to be set if the output frame is not " "derived from the point cloud message."); exit(EXIT_FAILURE); } @@ -555,12 +556,12 @@ int main(int argc, char *argv[]) if (!output_frame_from_msg) { // TF listener is only required when output frame != camera frame. - tf_listener = new tf::TransformListener(n); + tf_listener = new tf2_ros::TransformListener(n); } tf_broadcaster = new tf::TransformBroadcaster(); arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::Marker > ("visualization_marker", 0); - rvizMarkerPub2_ = n.advertise < visualization_msgs::Marker > ("ARmarker_points", 0); + rvizMarkerPub_ = n.advertise < visualization_msgs::msg::Marker > ("visualization_marker", 0); + rvizMarkerPub2_ = n.advertise < visualization_msgs::msg::Marker > ("ARmarker_points", 0); // Prepare dynamic reconfiguration dynamic_reconfigure::Server < ar_track_alvar::ParamsConfig > server; @@ -571,29 +572,29 @@ int main(int argc, char *argv[]) //Give tf a chance to catch up before the camera callback starts asking for transforms // It will also reconfigure parameters for the first time, setting the default values - ros::Duration(1.0).sleep(); - ros::spinOnce(); + rclcpp::Duration(1.0).sleep(); + rclcpp::spin_some(node); if (enabled == true) { // This always happens, as enable is true by default - ROS_INFO("Subscribing to image topic"); + RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "Subscribing to image topic"); cloud_sub_ = n.subscribe(cam_image_topic, 1, &getPointCloudCallback); } // Run at the configured rate, discarding pointcloud msgs if necessary - ros::Rate rate(max_frequency); + rclcpp::Rate rate(max_frequency); - while (ros::ok()) + while (rclcpp::ok()) { - ros::spinOnce(); + rclcpp::spin_some(node); rate.sleep(); - if (std::abs((rate.expectedCycleTime() - ros::Duration(1.0 / max_frequency)).toSec()) > 0.001) + if (std::abs((rate.expectedCycleTime() - rclcpp::Duration(1.0 / max_frequency)).toSec()) > 0.001) { // Change rate dynamically; if must be above 0, as 0 will provoke a segfault on next spinOnce - ROS_DEBUG("Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); - rate = ros::Rate(max_frequency); + RCLCPP_DEBUG(rclcpp::get_logger("ArTrackAlvar"), "Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); + rate = rclcpp::Rate(max_frequency); } if (enableSwitched == true) diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index d5a439e..6b0e2d9 100644 --- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -35,14 +35,15 @@ */ -#include +#include #include "ar_track_alvar/CvTestbed.h" #include "ar_track_alvar/MarkerDetector.h" #include "ar_track_alvar/Shared.h" #include #include #include -#include +#include "tf2_ros/buffer.h" +#include "tf2_ros/transform_listener.h" #include #include #include @@ -58,8 +59,8 @@ image_transport::Subscriber cam_sub_; ros::Publisher arMarkerPub_; ros::Publisher 
rvizMarkerPub_; ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -visualization_msgs::Marker rvizMarker_; -tf::TransformListener *tf_listener; +visualization_msgs::msg::Marker rvizMarker_; +tf2_ros::TransformListener *tf_listener; tf::TransformBroadcaster *tf_broadcaster; MarkerDetector marker_detector; @@ -75,21 +76,21 @@ std::string output_frame; int marker_resolution = 5; // default marker resolution int marker_margin = 2; // default marker margin -void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg); +void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg); -void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) +void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg) { //If we've already gotten the cam info, then go ahead if(cam->getCamInfo_){ try{ - tf::StampedTransform CamToOutput; + tf2::StampedTransform CamToOutput; try{ - tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, ros::Duration(1.0)); + tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); tf_listener->lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, CamToOutput); } catch (tf::TransformException ex){ - ROS_ERROR("%s",ex.what()); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); } @@ -126,7 +127,7 @@ void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) tf::Transform markerPose = t * m; // marker pose in the camera frame tf::Vector3 z_axis_cam = tf::Transform(rotation, tf::Vector3(0,0,0)) * tf::Vector3(0, 0, 1); -// ROS_INFO("%02i Z in cam frame: %f %f %f",id, z_axis_cam.x(), z_axis_cam.y(), z_axis_cam.z()); +// RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "%02i Z in cam frame: %f %f %f",id, z_axis_cam.x(), z_axis_cam.y(), z_axis_cam.z()); /// as we can't see through markers, this one is false positive detection if (z_axis_cam.z() > 0) { @@ -139,7 +140,7 @@ void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) out << id; std::string id_string = out.str(); markerFrame += id_string; - tf::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); + tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); tf_broadcaster->sendTransform(camToMarker); //Create the rviz visualization messages @@ -152,8 +153,8 @@ void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) rvizMarker_.scale.y = 1.0 * marker_size/100.0; rvizMarker_.scale.z = 0.2 * marker_size/100.0; rvizMarker_.ns = "basic_shapes"; - rvizMarker_.type = visualization_msgs::Marker::CUBE; - rvizMarker_.action = visualization_msgs::Marker::ADD; + rvizMarker_.type = visualization_msgs::msg::Marker::CUBE; + rvizMarker_.action = visualization_msgs::msg::Marker::ADD; switch (id) { case 0: @@ -193,7 +194,7 @@ void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) rvizMarker_.color.a = 1.0; break; } - rvizMarker_.lifetime = ros::Duration (1.0); + rvizMarker_.lifetime = rclcpp::Duration (1.0); rvizMarkerPub_.publish (rvizMarker_); //Get the pose of the tag in the camera frame, then the output frame (usually torso) @@ -217,7 +218,7 @@ void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) void configCallback(ar_track_alvar::ParamsConfig &config, uint32_t level) { - ROS_INFO("AR tracker reconfigured: %s %.2f %.2f %.2f %.2f", config.enabled ? 
"ENABLED" : "DISABLED", + RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "AR tracker reconfigured: %s %.2f %.2f %.2f %.2f", config.enabled ? "ENABLED" : "DISABLED", config.max_frequency, config.marker_size, config.max_new_marker_error, config.max_track_error); enableSwitched = enabled != config.enabled; @@ -229,7 +230,7 @@ void configCallback(ar_track_alvar::ParamsConfig &config, uint32_t level) max_track_error = config.max_track_error; } -void enableCallback(const std_msgs::BoolConstPtr& msg) +void enableCallback(const std_msgs::msg::Bool::ConstSharedPtr& msg) { enableSwitched = enabled != msg->data; enabled = msg->data; @@ -238,10 +239,10 @@ void enableCallback(const std_msgs::BoolConstPtr& msg) int main(int argc, char *argv[]) { ros::init (argc, argv, "marker_detect"); - ros::NodeHandle n, pn("~"); + rclcpp::Node n, pn("~"); if(argc > 1) { - ROS_WARN("Command line arguments are deprecated. Consider using ROS parameters and remappings."); + RCLCPP_WARN(rclcpp::get_logger("ArTrackAlvar"), "Command line arguments are deprecated. Consider using ROS parameters and remappings."); if(argc < 7){ std::cout << std::endl; @@ -279,7 +280,7 @@ int main(int argc, char *argv[]) pn.param("marker_resolution", marker_resolution, 5); pn.param("marker_margin", marker_margin, 2); if (!pn.getParam("output_frame", output_frame)) { - ROS_ERROR("Param 'output_frame' has to be set."); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "Param 'output_frame' has to be set."); exit(EXIT_FAILURE); } @@ -296,10 +297,10 @@ int main(int argc, char *argv[]) marker_detector.SetMarkerSize(marker_size, marker_resolution, marker_margin); cam = new Camera(n, cam_info_topic); - tf_listener = new tf::TransformListener(n); + tf_listener = new tf2_ros::TransformListener(n); tf_broadcaster = new tf::TransformBroadcaster(); arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::Marker > ("visualization_marker", 0); + rvizMarkerPub_ = n.advertise < visualization_msgs::msg::Marker > ("visualization_marker", 0); // Prepare dynamic reconfiguration dynamic_reconfigure::Server < ar_track_alvar::ParamsConfig > server; @@ -310,29 +311,29 @@ int main(int argc, char *argv[]) //Give tf a chance to catch up before the camera callback starts asking for transforms // It will also reconfigure parameters for the first time, setting the default values - ros::Duration(1.0).sleep(); - ros::spinOnce(); + rclcpp::Duration(1.0).sleep(); + rclcpp::spin_some(node); image_transport::ImageTransport it_(n); // Run at the configured rate, discarding pointcloud msgs if necessary - ros::Rate rate(max_frequency); + rclcpp::Rate rate(max_frequency); /// Subscriber for enable-topic so that a user can turn off the detection if it is not used without /// having to use the reconfigure where he has to know all parameters - ros::Subscriber enable_sub_ = pn.subscribe("enable_detection", 1, &enableCallback); + auto enable_sub_ = pn.subscribe("enable_detection", 1, &enableCallback); enableSwitched = true; - while (ros::ok()) + while (rclcpp::ok()) { - ros::spinOnce(); + rclcpp::spin_some(node); rate.sleep(); - if (std::abs((rate.expectedCycleTime() - ros::Duration(1.0 / max_frequency)).toSec()) > 0.001) + if (std::abs((rate.expectedCycleTime() - rclcpp::Duration(1.0 / max_frequency)).toSec()) > 0.001) { // Change rate dynamically; if must be above 0, as 0 will provoke a segfault on next spinOnce - ROS_DEBUG("Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), 
max_frequency); - rate = ros::Rate(max_frequency); + RCLCPP_DEBUG(rclcpp::get_logger("ArTrackAlvar"), "Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); + rate = rclcpp::Rate(max_frequency); } if (enableSwitched) diff --git a/ar_track_alvar/nodes/TrainMarkerBundle.cpp b/ar_track_alvar/nodes/TrainMarkerBundle.cpp index ac15a41..5b8fe3a 100644 --- a/ar_track_alvar/nodes/TrainMarkerBundle.cpp +++ b/ar_track_alvar/nodes/TrainMarkerBundle.cpp @@ -43,7 +43,8 @@ #include #include #include -#include +#include "tf2_ros/buffer.h" +#include "tf2_ros/transform_listener.h" #include #include #include @@ -61,7 +62,7 @@ image_transport::Subscriber cam_sub_; ros::Publisher arMarkerPub_; ros::Publisher rvizMarkerPub_; ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -tf::TransformListener *tf_listener; +tf2_ros::TransformListener *tf_listener; tf::TransformBroadcaster *tf_broadcaster; MarkerDetector marker_detector; MultiMarkerInitializer *multi_marker_init=NULL; @@ -83,9 +84,9 @@ std::string output_frame; int nof_markers; double GetMultiMarkerPose(IplImage *image, Pose &pose); -void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg); +void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg); int keyCallback(int key); -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_msg, tf::StampedTransform &CamToOutput, visualization_msgs::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker); +void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker); double GetMultiMarkerPose(IplImage *image, Pose &pose) { @@ -165,7 +166,7 @@ double GetMultiMarkerPose(IplImage *image, Pose &pose) { return error; } -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_msg, tf::StampedTransform &CamToOutput, visualization_msgs::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker){ +void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker){ double px,py,pz,qx,qy,qz,qw; px = p.translation[0]/100.0; @@ -192,7 +193,7 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ out << id; std::string id_string = out.str(); markerFrame += id_string; - tf::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); + tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); tf_broadcaster->sendTransform(camToMarker); } @@ -206,8 +207,8 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ rvizMarker->scale.y = 1.0 * marker_size/100.0; rvizMarker->scale.z = 0.2 * marker_size/100.0; rvizMarker->ns = "basic_shapes"; - rvizMarker->type = visualization_msgs::Marker::CUBE; - rvizMarker->action = visualization_msgs::Marker::ADD; + rvizMarker->type = visualization_msgs::msg::Marker::CUBE; + rvizMarker->action = visualization_msgs::msg::Marker::ADD; //Determine a color and opacity, based on marker type if(type==MAIN_MARKER){ @@ -229,7 +230,7 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ rvizMarker->color.a = 0.5; } - rvizMarker->lifetime = ros::Duration (1.0); 
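Right at the lifetime conversion here: rclcpp::Duration has no seconds-as-double constructor the way ros::Duration did, so `rclcpp::Duration(1.0)` either resolves to the nanoseconds constructor (silently making a 1 ns lifetime) or fails to compile, depending on distro; likewise rclcpp::Time has no static now(). A sketch of the usual Foxy spellings, with `node` standing in for a node pointer:

    #include <chrono>
    #include <rclcpp/rclcpp.hpp>

    // Lifetimes and timeouts: build Durations from seconds explicitly.
    rvizMarker->lifetime = rclcpp::Duration::from_seconds(1.0);

    // Stamps come from the node's clock, not a static Time::now().
    rvizMarker->header.stamp = node->now();

    // One-off sleeps outside an executor:
    rclcpp::sleep_for(std::chrono::seconds(1));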
+ rvizMarker->lifetime = rclcpp::Duration (1.0); //Get the pose of the tag in the camera frame, then convert to the output frame (usually torso) tf::Transform tagPoseOutput = CamToOutput * markerPose; @@ -242,7 +243,7 @@ void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::ImageConstPtr image_ } -void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) +void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg) { //Check if automatic measurement collection should be triggered if(auto_collect){ @@ -256,16 +257,16 @@ void getCapCallback (const sensor_msgs::ImageConstPtr & image_msg) if(cam->getCamInfo_){ try{ //Get the transformation from the Camera to the output frame for this image capture - tf::StampedTransform CamToOutput; + tf2::StampedTransform CamToOutput; try{ - tf_listener->waitForTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp, ros::Duration(1.0)); + tf_listener->waitForTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp, rclcpp::Duration(1.0)); tf_listener->lookupTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp, CamToOutput); } catch (tf::TransformException ex){ - ROS_ERROR("%s",ex.what()); + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); } - visualization_msgs::Marker rvizMarker; + visualization_msgs::msg::Marker rvizMarker; ar_track_alvar_msgs::AlvarMarker ar_pose_marker; arPoseMarkers_.markers.clear (); @@ -373,7 +374,7 @@ int keyProcess(int key) int main(int argc, char *argv[]) { ros::init (argc, argv, "marker_detect"); - ros::NodeHandle n; + rclcpp::Node n; if(argc < 8){ std::cout << std::endl; @@ -394,14 +395,14 @@ int main(int argc, char *argv[]) marker_detector.SetMarkerSize(marker_size); cam = new Camera(n, cam_info_topic); - tf_listener = new tf::TransformListener(n); + tf_listener = new tf2_ros::TransformListener(n); tf_broadcaster = new tf::TransformBroadcaster(); arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::Marker > ("visualization_marker", 0); + rvizMarkerPub_ = n.advertise < visualization_msgs::msg::Marker > ("visualization_marker", 0); //Give tf a chance to catch up before the camera callback starts asking for transforms - ros::Duration(1.0).sleep(); - ros::spinOnce(); + rclcpp::Duration(1.0).sleep(); + rclcpp::spin_some(node); //Subscribe to camera message ROS_INFO ("Subscribing to image topic"); @@ -428,7 +429,7 @@ int main(int argc, char *argv[]) int key = cvWaitKey(20); if(key >= 0) keyProcess(key); - ros::spinOnce(); + rclcpp::spin_some(node); } return 0; diff --git a/ar_track_alvar/nodes/log/COLCON_IGNORE b/ar_track_alvar/nodes/log/COLCON_IGNORE new file mode 100644 index 0000000..e69de29 diff --git a/ar_track_alvar/nodes/log/latest b/ar_track_alvar/nodes/log/latest new file mode 120000 index 0000000..1715667 --- /dev/null +++ b/ar_track_alvar/nodes/log/latest @@ -0,0 +1 @@ +latest_list \ No newline at end of file diff --git a/ar_track_alvar/nodes/log/latest_list b/ar_track_alvar/nodes/log/latest_list new file mode 120000 index 0000000..6017113 --- /dev/null +++ b/ar_track_alvar/nodes/log/latest_list @@ -0,0 +1 @@ +list_2021-07-19_11-38-02 \ No newline at end of file diff --git a/ar_track_alvar/nodes/log/list_2021-07-19_11-38-02/logger_all.log b/ar_track_alvar/nodes/log/list_2021-07-19_11-38-02/logger_all.log new file mode 100644 index 0000000..b7e4e06 --- /dev/null +++ 
b/ar_track_alvar/nodes/log/list_2021-07-19_11-38-02/logger_all.log @@ -0,0 +1,26 @@ +[0.403s] DEBUG:colcon:Command line arguments: ['/usr/bin/colcon', 'list', '-p', '--base-paths', '/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes'] +[0.403s] DEBUG:colcon:Parsed command line arguments: Namespace(base_paths=['/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes'], build_base='build', ignore_user_meta=False, log_base=None, log_level=None, main=>, metas=['./colcon.meta'], names_only=False, packages_above=None, packages_above_and_dependencies=None, packages_above_depth=None, packages_end=None, packages_ignore=None, packages_ignore_regex=None, packages_select=None, packages_select_build_failed=False, packages_select_by_dep=None, packages_select_regex=None, packages_select_test_failures=False, packages_skip=None, packages_skip_build_finished=False, packages_skip_by_dep=None, packages_skip_regex=None, packages_skip_test_passed=False, packages_skip_up_to=None, packages_start=None, packages_up_to=None, packages_up_to_regex=None, paths=None, paths_only=True, topological_graph=False, topological_graph_density=False, topological_graph_dot=False, topological_graph_dot_cluster=False, topological_graph_dot_include_skipped=False, topological_graph_legend=False, topological_order=False, verb_extension=, verb_name='list', verb_parser=) +[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(colcon_meta) check parameters +[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(recursive) check parameters +[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(ignore) check parameters +[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(path) check parameters +[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(recursive) discover +[0.434s] INFO:colcon.colcon_core.package_discovery:Crawling recursively for packages in '/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes' +[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['ignore', 'ignore_ament_install'] +[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'ignore' +[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'ignore_ament_install' +[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['colcon_pkg'] +[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'colcon_pkg' +[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['colcon_meta'] +[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'colcon_meta' +[0.435s] Level 
1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['ros'] +[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'ros' +[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['cmake', 'python'] +[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'cmake' +[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'python' +[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['python_setup_py'] +[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'python_setup_py' +[0.451s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes/log) by extensions ['ignore', 'ignore_ament_install'] +[0.451s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes/log) by extension 'ignore' +[0.451s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes/log) ignored +[0.451s] Level 1:colcon.colcon_core.package_discovery:discover_packages(recursive) using defaults diff --git a/ar_track_alvar/package.xml b/ar_track_alvar/package.xml index e5184c0..65e1674 100644 --- a/ar_track_alvar/package.xml +++ b/ar_track_alvar/package.xml @@ -1,4 +1,4 @@ - + ar_track_alvar 0.7.1 @@ -10,48 +10,32 @@ This package is a ROS wrapper for Alvar, an open source AR tag tracking library. 
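Stepping back from the node sources above before the manifest changes continue: several of the converted main() functions keep ros::init, declare `rclcpp::Node n, pn("~")`, and spin on a `node` variable that is never declared. rclcpp nodes are held by shared_ptr, there is no private-namespace ("~") handle, and parameters are declared on the node itself. A minimal Foxy-style skeleton for these executables — a sketch, not a drop-in, with names mirroring the hunks above:

    #include <cstdlib>
    #include <string>
    #include <rclcpp/rclcpp.hpp>

    int main(int argc, char * argv[])
    {
      rclcpp::init(argc, argv);
      auto node = rclcpp::Node::make_shared("marker_detect");

      // ros::NodeHandle("~") parameters become node parameters.
      double marker_size = node->declare_parameter("marker_size", 10.0);
      std::string output_frame =
          node->declare_parameter("output_frame", std::string());
      if (output_frame.empty()) {
        RCLCPP_ERROR(node->get_logger(), "Param 'output_frame' has to be set.");
        return EXIT_FAILURE;
      }

      // The spin_some(node)/Rate loop in the hunks above then type-checks:
      rclcpp::Rate rate(10.0);
      while (rclcpp::ok()) {
        rclcpp::spin_some(node);
        rate.sleep();
      }
      rclcpp::shutdown();
      return 0;
    }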
  <license>LGPL-2.1</license>
  <url>http://ros.org/wiki/ar_track_alvar</url>

-  <buildtool_depend>catkin</buildtool_depend>
+  <buildtool_depend>ament_cmake</buildtool_depend>
+  <depend>ar_track_alvar_msgs</depend>
+  <depend>cv_bridge</depend>
+  <depend>geometry_msgs</depend>
+  <depend>image_transport</depend>
+  <depend>pcl_conversions</depend>
+  <depend>perception_pcl</depend>
+  <depend>rclcpp</depend>
+  <depend>rclpy</depend>
+  <depend>resource_retriever</depend>
+  <depend>sensor_msgs</depend>
+  <depend>std_msgs</depend>
+  <depend>tf2</depend>
+  <depend>tf2_ros</depend>
+  <depend>tinyxml2</depend>
+  <depend>visualization_msgs</depend>
+  <depend>libopencv-dev</depend>
+  <depend>eigen</depend>
+  <depend>libpcl-all-dev</depend>
+
-  <build_depend>ar_track_alvar_msgs</build_depend>
-  <build_depend>cmake_modules</build_depend>
-  <build_depend>cv_bridge</build_depend>
-  <build_depend>geometry_msgs</build_depend>
-  <build_depend>image_transport</build_depend>
-  <build_depend>message_generation</build_depend>
-  <build_depend>pcl_ros</build_depend>
-  <build_depend>pcl_conversions</build_depend>
-  <build_depend>resource_retriever</build_depend>
-  <build_depend>rospy</build_depend>
-  <build_depend>roscpp</build_depend>
-  <build_depend>sensor_msgs</build_depend>
-  <build_depend>std_msgs</build_depend>
-  <build_depend>tf</build_depend>
-  <build_depend>tf2</build_depend>
-  <build_depend>tinyxml</build_depend>
-  <build_depend>visualization_msgs</build_depend>
-  <build_depend>dynamic_reconfigure</build_depend>
-  <run_depend>ar_track_alvar_msgs</run_depend>
-  <run_depend>cv_bridge</run_depend>
-  <run_depend>geometry_msgs</run_depend>
-  <run_depend>image_transport</run_depend>
-  <run_depend>message_runtime</run_depend>
-  <run_depend>pcl_ros</run_depend>
-  <run_depend>pcl_conversions</run_depend>
-  <run_depend>resource_retriever</run_depend>
-  <run_depend>rospy</run_depend>
-  <run_depend>roscpp</run_depend>
-  <run_depend>sensor_msgs</run_depend>
-  <run_depend>std_msgs</run_depend>
-  <run_depend>tf</run_depend>
-  <run_depend>tf2</run_depend>
-  <run_depend>tinyxml</run_depend>
-  <run_depend>visualization_msgs</run_depend>
-  <run_depend>dynamic_reconfigure</run_depend>
-
-  <test_depend>rosbag</test_depend>
-  <test_depend>rostest</test_depend>
+  <test_depend>ament_cmake_gtest</test_depend>
+  <test_depend>rosbag2</test_depend>
+
+  <export>
+    <build_type>ament_cmake</build_type>
+  </export>
-  <export>
-  </export>
diff --git a/ar_track_alvar/src/Camera.cpp b/ar_track_alvar/src/Camera.cpp
index e96f2f4..1d05c13 100644
--- a/ar_track_alvar/src/Camera.cpp
+++ b/ar_track_alvar/src/Camera.cpp
@@ -25,7 +25,11 @@
 #include "ar_track_alvar/Camera.h"
 #include "ar_track_alvar/FileFormatUtils.h"
 #include
+#include "rclcpp/rclcpp.hpp"
+#include
+#include
+using std::placeholders::_1;
 using namespace std;

 namespace alvar {
@@ -39,56 +43,75 @@ void ProjPoints::Reset() {
 }

 // TODO: Does it matter what the etalon_square_size is???
-bool ProjPoints::AddPointsUsingChessboard(IplImage *image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize) {
-  if (image->width == 0) return false;
-  IplImage *gray = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1);
-  CvPoint2D32f *corners = new CvPoint2D32f[etalon_rows*etalon_columns];
-  if (image->nChannels == 1)
-    cvCopy(image, gray);
+bool ProjPoints::AddPointsUsingChessboard(cv::Mat *image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize) {
+  if (image->cols == 0) return false;
+  //IplImage *gray = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1);
+  // cv::Size takes (width, height), i.e. (cols, rows):
+  cv::Mat gray = cv::Mat(cv::Size(image->cols, image->rows), CV_MAKETYPE(CV_8U,1));
+  // CvPoint2D32f *corners = new CvPoint2D32f[etalon_rows*etalon_columns];
+  cv::Size patternsize(etalon_rows, etalon_columns);
+  vector<cv::Point2f> centers;
+  if (image->channels() == 1)
+    gray = image->clone();
   else
-    cvCvtColor(image, gray, CV_RGB2GRAY);
-  width = image->width;
-  height = image->height;
+    cv::cvtColor(*image, gray, cv::COLOR_BGR2GRAY);
+  width = image->cols;
+  height = image->rows;
   int point_count = 0;
-  int pattern_was_found = cvFindChessboardCorners(gray, cvSize(etalon_rows, etalon_columns), corners, &point_count);
+  //int pattern_was_found = cvFindChessboardCorners(gray, cvSize(etalon_rows, etalon_columns), corners, &point_count);
+  bool pattern_was_found = findChessboardCorners(gray, patternsize, centers, cv::CALIB_CB_ADAPTIVE_THRESH);
+  point_count = centers.size();
+  if (!pattern_was_found) point_count = 0;
+  if (point_count > 0) {
-    cvFindCornerSubPix(gray, corners, point_count, cvSize(5,5), cvSize(-1,-1),
-      cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 10, 0.01f));
+    cv::TermCriteria criteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 30, 0.001);
+    cv::cornerSubPix(gray, centers, cv::Size(5,5), cv::Size(-1,-1), criteria);
+    // Displaying the detected corner points on the checker board
+    if (visualize)
+    {
+      cv::drawChessboardCorners(*image, cv::Size(etalon_rows, etalon_columns), centers, pattern_was_found);
+    }
+    for (int i=0; i 0) return true; return false; }

-bool
ProjPoints::AddPointsUsingMarkers(vector &marker_corners, vector &marker_corners_img, IplImage* image) +bool ProjPoints::AddPointsUsingMarkers(vector &marker_corners, vector &marker_corners_img, cv::Mat* image) { - width = image->width; - height = image->height; + width = image->rows; + height = image->cols; if ((marker_corners.size() == marker_corners_img.size()) && (marker_corners.size() == 4)) { for (size_t p=0; p &marker_corners, vect return true; } -Camera::Camera() { +Camera::Camera(): Node("camera_node") { calib_K = cvMat(3, 3, CV_64F, calib_K_data); calib_D = cvMat(4, 1, CV_64F, calib_D_data); memset(calib_K_data,0,sizeof(double)*3*3); @@ -120,7 +143,7 @@ Camera::Camera() { } -Camera::Camera(ros::NodeHandle & n, std::string cam_info_topic):n_(n) +Camera::Camera(std::string cam_info_topic):Node("camera_node") { calib_K = cvMat(3, 3, CV_64F, calib_K_data); calib_D = cvMat(4, 1, CV_64F, calib_D_data); @@ -136,30 +159,13 @@ Camera::Camera(ros::NodeHandle & n, std::string cam_info_topic):n_(n) x_res = 640; y_res = 480; cameraInfoTopic_ = cam_info_topic; - ROS_INFO ("Subscribing to info topic"); - sub_ = n_.subscribe (cameraInfoTopic_, 1, &Camera::camInfoCallback, this); + + RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); + sub_ = this->create_subscription(cameraInfoTopic_, 1,std::bind(&Camera::camInfoCallback, this, _1)); getCamInfo_ = false; } - -// -//Camera::Camera(int w, int h) { -// calib_K = cvMat(3, 3, CV_64F, calib_K_data); -// calib_D = cvMat(4, 1, CV_64F, calib_D_data); -// memset(calib_K_data,0,sizeof(double)*3*3); -// memset(calib_D_data,0,sizeof(double)*4); -// calib_K_data[0][0] = w/2; -// calib_K_data[1][1] = w/2; -// calib_K_data[0][2] = w/2; -// calib_K_data[1][2] = h/2; -// calib_K_data[2][2] = 1; -// calib_x_res = w; -// calib_y_res = h; -// x_res = w; -// y_res = h; -//} - void Camera::SetSimpleCalib(int _x_res, int _y_res, double f_fac) { memset(calib_K_data,0,sizeof(double)*3*3); @@ -187,37 +193,64 @@ bool Camera::LoadCalibXML(const char *calibfile) { bool Camera::LoadCalibOpenCV(const char *calibfile) { cvSetErrMode(CV_ErrModeSilent); - CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_READ); + // CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_READ); + + cv::FileStorage cvfs(calibfile, cv::FileStorage::READ); // CV_STORAGE_READ); + + /* read data from file */ + cvSetErrMode(CV_ErrModeLeaf); - if(fs){ - CvFileNode* root_node = cvGetRootFileNode(fs); + if(cvfs.isOpened()){ + //CvFileNode* root_node = cvGetRootFileNode(fs); + + cv::FileNode root_node = cvfs.getFirstTopLevelNode(); // K Intrinsic - CvFileNode* intrinsic_mat_node = cvGetFileNodeByName(fs, root_node, "intrinsic_matrix"); - CvMat* intrinsic_mat = reinterpret_cast(cvRead(fs, intrinsic_mat_node)); - cvmSet(&calib_K, 0, 0, cvmGet(intrinsic_mat, 0, 0)); - cvmSet(&calib_K, 0, 1, cvmGet(intrinsic_mat, 0, 1)); - cvmSet(&calib_K, 0, 2, cvmGet(intrinsic_mat, 0, 2)); - cvmSet(&calib_K, 1, 0, cvmGet(intrinsic_mat, 1, 0)); - cvmSet(&calib_K, 1, 1, cvmGet(intrinsic_mat, 1, 1)); - cvmSet(&calib_K, 1, 2, cvmGet(intrinsic_mat, 1, 2)); - cvmSet(&calib_K, 2, 0, cvmGet(intrinsic_mat, 2, 0)); - cvmSet(&calib_K, 2, 1, cvmGet(intrinsic_mat, 2, 1)); - cvmSet(&calib_K, 2, 2, cvmGet(intrinsic_mat, 2, 2)); + + //cv::FileNode intrinsic_mat_node = root_node[std::string("intrinsic_matrix")]; + + + //CvFileNode* intrinsic_mat_node = cvGetFileNodeByName(fs, root_node, "intrinsic_matrix"); + + cv::Mat intrinsic_mat; + cvfs["intrinsic_matrix"] >> intrinsic_mat; + + CvMat intrinsic_mat2 = 
cvMat(intrinsic_mat); + //= intrinsic_mat_node.read(); //(cvRead(fs, intrinsic_mat_node)); + // reinterpret_cast + cvmSet(&calib_K, 0, 0, cvmGet(&intrinsic_mat2, 0, 0)); + cvmSet(&calib_K, 0, 1, cvmGet(&intrinsic_mat2, 0, 1)); + cvmSet(&calib_K, 0, 2, cvmGet(&intrinsic_mat2, 0, 2)); + cvmSet(&calib_K, 1, 0, cvmGet(&intrinsic_mat2, 1, 0)); + cvmSet(&calib_K, 1, 1, cvmGet(&intrinsic_mat2, 1, 1)); + cvmSet(&calib_K, 1, 2, cvmGet(&intrinsic_mat2, 1, 2)); + cvmSet(&calib_K, 2, 0, cvmGet(&intrinsic_mat2, 2, 0)); + cvmSet(&calib_K, 2, 1, cvmGet(&intrinsic_mat2, 2, 1)); + cvmSet(&calib_K, 2, 2, cvmGet(&intrinsic_mat2, 2, 2)); + + + //CvFileNode* dist_mat_node = cvGetFileNodeByName(fs, root_node, "distortion"); // D Distortion - CvFileNode* dist_mat_node = cvGetFileNodeByName(fs, root_node, "distortion"); - CvMat* dist_mat = reinterpret_cast(cvRead(fs, dist_mat_node)); - cvmSet(&calib_D, 0, 0, cvmGet(dist_mat, 0, 0)); - cvmSet(&calib_D, 1, 0, cvmGet(dist_mat, 1, 0)); - cvmSet(&calib_D, 2, 0, cvmGet(dist_mat, 2, 0)); - cvmSet(&calib_D, 3, 0, cvmGet(dist_mat, 3, 0)); + cv::Mat dist_mat; + cvfs["distortion"] >> dist_mat; + CvMat dist_mat2 = cvMat(dist_mat); + + + //reinterpret_cast(cvRead(fs, dist_mat_node)); + cvmSet(&calib_D, 0, 0, cvmGet(&dist_mat2, 0, 0)); + cvmSet(&calib_D, 1, 0, cvmGet(&dist_mat2, 1, 0)); + cvmSet(&calib_D, 2, 0, cvmGet(&dist_mat2, 2, 0)); + cvmSet(&calib_D, 3, 0, cvmGet(&dist_mat2, 3, 0)); // Resolution - CvFileNode* width_node = cvGetFileNodeByName(fs, root_node, "width"); - CvFileNode* height_node = cvGetFileNodeByName(fs, root_node, "height"); - calib_x_res = width_node->data.i; - calib_y_res = height_node->data.i; - cvReleaseFileStorage(&fs); + // cv::FileNode width_node = root_node["width"]; + // cv::FileNode height_node = root_node["height"]; + // calib_x_res = width_node->data.i; + // calib_y_res = height_node->data.i; + + cvfs["width"] >> calib_x_res; + cvfs["height"] >> calib_y_res; + //cvReleaseFileStorage(&fs); return true; } // reset error status @@ -225,30 +258,30 @@ bool Camera::LoadCalibOpenCV(const char *calibfile) { return false; } -void Camera::SetCameraInfo(const sensor_msgs::CameraInfo &camInfo) +void Camera::SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) { - cam_info_ = camInfo; + //cam_info_ = *cam_info; - calib_x_res = cam_info_.width; - calib_y_res = cam_info_.height; + calib_x_res = cam_info->width; + calib_y_res = cam_info->height; x_res = calib_x_res; y_res = calib_y_res; - cvmSet(&calib_K, 0, 0, cam_info_.K[0]); - cvmSet(&calib_K, 0, 1, cam_info_.K[1]); - cvmSet(&calib_K, 0, 2, cam_info_.K[2]); - cvmSet(&calib_K, 1, 0, cam_info_.K[3]); - cvmSet(&calib_K, 1, 1, cam_info_.K[4]); - cvmSet(&calib_K, 1, 2, cam_info_.K[5]); - cvmSet(&calib_K, 2, 0, cam_info_.K[6]); - cvmSet(&calib_K, 2, 1, cam_info_.K[7]); - cvmSet(&calib_K, 2, 2, cam_info_.K[8]); - - if (cam_info_.D.size() >= 4) { - cvmSet(&calib_D, 0, 0, cam_info_.D[0]); - cvmSet(&calib_D, 1, 0, cam_info_.D[1]); - cvmSet(&calib_D, 2, 0, cam_info_.D[2]); - cvmSet(&calib_D, 3, 0, cam_info_.D[3]); + cvmSet(&calib_K, 0, 0, cam_info_.k[0]); + cvmSet(&calib_K, 0, 1, cam_info_.k[1]); + cvmSet(&calib_K, 0, 2, cam_info_.k[2]); + cvmSet(&calib_K, 1, 0, cam_info_.k[3]); + cvmSet(&calib_K, 1, 1, cam_info_.k[4]); + cvmSet(&calib_K, 1, 2, cam_info_.k[5]); + cvmSet(&calib_K, 2, 0, cam_info_.k[6]); + cvmSet(&calib_K, 2, 1, cam_info_.k[7]); + cvmSet(&calib_K, 2, 2, cam_info_.k[8]); + + if (cam_info_.d.size() >= 4) { + cvmSet(&calib_D, 0, 0, cam_info_.d[0]); + cvmSet(&calib_D, 1, 0, cam_info_.d[1]); 
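One catch in the SetCameraInfo hunk in progress here: the caching assignment is commented out (`//cam_info_ = *cam_info;`) while the cvmSet calls still read the member cam_info_, so K and D are filled from a default-constructed message. Either restore the copy or index the argument directly; a sketch of the latter (field names k/d per the ROS 2 sensor_msgs/msg/CameraInfo renaming):

    void Camera::SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info)
    {
      cam_info_ = *cam_info;  // keep the cached copy in sync

      calib_x_res = cam_info->width;
      calib_y_res = cam_info->height;
      x_res = calib_x_res;
      y_res = calib_y_res;

      // Row-major 3x3 intrinsics k[0..8] -> calib_K.
      for (int r = 0; r < 3; ++r)
        for (int c = 0; c < 3; ++c)
          cvmSet(&calib_K, r, c, cam_info->k[r * 3 + c]);

      // First four distortion coefficients, zero-filled when absent.
      for (int i = 0; i < 4; ++i)
        cvmSet(&calib_D, i, 0,
               i < static_cast<int>(cam_info->d.size()) ? cam_info->d[i] : 0.0);
    }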
+ cvmSet(&calib_D, 2, 0, cam_info_.d[2]); + cvmSet(&calib_D, 3, 0, cam_info_.d[3]); } else { cvmSet(&calib_D, 0, 0, 0); cvmSet(&calib_D, 1, 0, 0); @@ -257,13 +290,13 @@ void Camera::SetCameraInfo(const sensor_msgs::CameraInfo &camInfo) } } -void Camera::camInfoCallback (const sensor_msgs::CameraInfoConstPtr & cam_info) +void Camera::camInfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) { if (!getCamInfo_) { - SetCameraInfo(*cam_info); + SetCameraInfo(cam_info); getCamInfo_ = true; - sub_.shutdown(); + sub_.reset(); } } @@ -316,16 +349,24 @@ bool Camera::SaveCalibXML(const char *calibfile) { bool Camera::SaveCalibOpenCV(const char *calibfile) { cvSetErrMode(CV_ErrModeSilent); - CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_WRITE); + cv::FileStorage cvfs(calibfile, cv::FileStorage::WRITE); + //cvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_WRITE); cvSetErrMode(CV_ErrModeLeaf); - if(fs){ - cvWrite(fs, "intrinsic_matrix", &calib_K, cvAttrList(0,0)); - cvWrite(fs, "distortion", &calib_D, cvAttrList(0,0)); - //cvWriteReal(fs, "fov_x", data.fov_x); - //cvWriteReal(fs, "fov_y", data.fov_y); - cvWriteInt(fs, "width", calib_x_res); - cvWriteInt(fs, "height", calib_y_res); - cvReleaseFileStorage(&fs); + if(cvfs.isOpened()){ + cv::write(cvfs, "intrinsic_matrix", cv::cvarrToMat(&calib_K)); + cv::write(cvfs, "distortion", cv::cvarrToMat(&calib_D)); + cv::write(cvfs, "width", calib_x_res); + cv::write(cvfs, "width", calib_x_res); + // cv::write(cvfs, "fov_x", data.fov_x); + // cv::write(cvfs, "fov_y", data.fov_y); + + + + // cvWrite(fs, "intrinsic_matrix", &calib_K, cvAttrList(0,0)); + // cvWrite(fs, "distortion", &calib_D, cvAttrList(0,0)); + // cvWriteInt(fs, "width", calib_x_res); + // cvWriteInt(fs, "height", calib_y_res); + //cvReleaseFileStorage(&fs); return true; } // reset error status @@ -360,9 +401,14 @@ void Camera::Calibrate(ProjPoints &pp) image_points->data.fl[i*2+0] = (float)pp.image_points[i].x; image_points->data.fl[i*2+1] = (float)pp.image_points[i].y; } - cvCalibrateCamera2(object_points, image_points, &point_counts, - cvSize(pp.width, pp.height), - &calib_K, &calib_D, 0, 0, CV_CALIB_USE_INTRINSIC_GUESS); + + cv::Mat cameraMatrix,distCoeffs,R,T; + + cv::calibrateCamera(cv::cvarrToMat(object_points), cv::cvarrToMat(image_points), cv::Size(pp.width, pp.height), cameraMatrix, distCoeffs, R, T); + + + calib_K = cvMat(cameraMatrix); + calib_D = cvMat(distCoeffs); calib_x_res = pp.width; calib_y_res = pp.height; @@ -488,7 +534,7 @@ void Camera::Undistort(vector& points) */ } -void Camera::Undistort(CvPoint2D32f& point) +void Camera::Undistort(cv::Point2f& point) { /* // focal length @@ -608,7 +654,7 @@ void Camera::Distort(PointDouble & point) */ } -void Camera::Distort(CvPoint2D32f & point) +void Camera::Distort(cv::Point3f & point) { /* double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy @@ -636,7 +682,7 @@ void Camera::Distort(CvPoint2D32f & point) */ } -void Camera::CalcExteriorOrientation(vector& pw, vector& pi, +void Camera::CalcExteriorOrientation(vector& pw, vector& pi, Pose *pose) { double ext_rodriques[3]; @@ -653,14 +699,17 @@ void Camera::CalcExteriorOrientation(vector& pw, vectordata.fl[i*2+1] = (float)pi[i].y; } //cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, &calib_D, &ext_rodriques_mat, &ext_translate_mat); - cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, NULL, &ext_rodriques_mat, &ext_translate_mat); + + 
//cv::sfm::computeOrientation(cv::cvarrToMat(object_points),cv::cvarrToMat(image_points),) + + cv::solvePnP(cv::cvarrToMat(object_points), cv::cvarrToMat(image_points), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), cv::cvarrToMat(&ext_rodriques_mat), cv::cvarrToMat(&ext_translate_mat)); pose->SetRodriques(&ext_rodriques_mat); pose->SetTranslation(&ext_translate_mat); cvReleaseMat(&object_points); cvReleaseMat(&image_points); } -void Camera::CalcExteriorOrientation(vector& pw, vector& pi, +void Camera::CalcExteriorOrientation(vector& pw, vector& pi, CvMat *rodriques, CvMat *tra) { //assert(pw.size() == pi.size()); @@ -689,7 +738,14 @@ void Camera::CalcExteriorOrientation(vector& pw, vector& pw, vector pw3; + vector pw3; pw3.resize(size); for (int i=0; i& pw, vector& pw, Pose *pose, vector& pi) const { +void Camera::ProjectPoints(vector& pw, Pose *pose, vector& pi) const { double ext_rodriques[3]; double ext_translate[3]; CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques); @@ -753,7 +813,7 @@ void Camera::ProjectPoints(vector& pw, Pose *pose, vectordata.fl[i*3+1] = (float)pw[i].y; object_points->data.fl[i*3+2] = (float)pw[i].z; } - cvProjectPoints2(object_points, &ext_rodriques_mat, &ext_translate_mat, &calib_K, &calib_D, image_points); + cv::projectPoints(cv::cvarrToMat(object_points), cv::cvarrToMat(&ext_rodriques_mat), cv::cvarrToMat(&ext_translate_mat), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), cv::cvarrToMat(image_points)); for (size_t i=0; idata.fl[i*2+0]; pi[i].y = image_points->data.fl[i*2+1]; @@ -766,7 +826,7 @@ void Camera::ProjectPoints(const CvMat* object_points, const CvMat* rotation_vec const CvMat* translation_vector, CvMat* image_points) const { // Project points - cvProjectPoints2(object_points, rotation_vector, translation_vector, &calib_K, &calib_D, image_points); + cv::projectPoints(cv::cvarrToMat(object_points), cv::cvarrToMat(rotation_vector), cv::cvarrToMat(translation_vector), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), cv::cvarrToMat(image_points)); } void Camera::ProjectPoints(const CvMat* object_points, const Pose* pose, CvMat* image_points) const @@ -777,7 +837,7 @@ void Camera::ProjectPoints(const CvMat* object_points, const Pose* pose, CvMat* CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate); pose->GetRodriques(&ext_rodriques_mat); pose->GetTranslation(&ext_translate_mat); - cvProjectPoints2(object_points, &ext_rodriques_mat, &ext_translate_mat, &calib_K, &calib_D, image_points); + cv::projectPoints(cv::cvarrToMat(object_points), cv::cvarrToMat(&ext_rodriques_mat), cv::cvarrToMat(&ext_translate_mat), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), cv::cvarrToMat(image_points)); } void Camera::ProjectPoints(const CvMat* object_points, double gl[16], CvMat* image_points) const @@ -811,7 +871,7 @@ void Camera::ProjectPoints(const CvMat* object_points, double gl[16], CvMat* ima ProjectPoints(object_points, &rod_mat, &tra_mat, image_points); } -void Camera::ProjectPoint(const CvPoint3D64f pw, const Pose *pose, CvPoint2D64f &pi) const { +void Camera::ProjectPoint(const cv::Point3f pw, const Pose *pose, cv::Point2f &pi) const { float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z}; float image_points_data[2] = {0}; CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data); @@ -821,21 +881,21 @@ void Camera::ProjectPoint(const CvPoint3D64f pw, const Pose *pose, CvPoint2D64f pi.y = image_points.data.fl[1]; } -void Camera::ProjectPoint(const CvPoint3D32f pw, const Pose *pose, CvPoint2D32f &pi) const { 
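The solvePnP conversion above funnels everything through cv::cvarrToMat() on CvMat temporaries, which is only safe while the wrapped headers share storage of exactly the right size and type; nearby, SaveCalibOpenCV also writes the "width" key twice, where the second write presumably meant "height" with calib_y_res. A sketch of the same pose computation with cv::Mat containers end to end (pw/pi are the point vectors of the surrounding function):

    #include <vector>
    #include <opencv2/calib3d.hpp>

    std::vector<cv::Point3f> object_points;
    std::vector<cv::Point2f> image_points;
    for (size_t i = 0; i < pw.size(); ++i) {
      object_points.emplace_back((float)pw[i].x, (float)pw[i].y, (float)pw[i].z);
      image_points.emplace_back((float)pi[i].x, (float)pi[i].y);
    }
    cv::Mat rvec, tvec;  // Rodrigues rotation and translation on output
    cv::solvePnP(object_points, image_points,
                 cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D),
                 rvec, tvec);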
- float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z}; - float image_points_data[2] = {0}; - CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data); - CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data); - ProjectPoints(&object_points, pose, &image_points); - pi.x = image_points.data.fl[0]; - pi.y = image_points.data.fl[1]; +// void Camera::ProjectPoint(const cv::Point3f pw, const Pose *pose, cv::Point2f &pi) const { +// float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z}; +// float image_points_data[2] = {0}; +// CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data); +// CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data); +// ProjectPoints(&object_points, pose, &image_points); +// pi.x = image_points.data.fl[0]; +// pi.y = image_points.data.fl[1]; } -Homography::Homography() { +alvar::Homography::Homography() { cvInitMatHeader(&H, 3, 3, CV_64F, H_data); } -void Homography::Find(const vector& pw, const vector& pi) +void alvar::Homography::Find(const vector& pw, const vector& pi) { assert(pw.size() == pi.size()); int size = (int)pi.size(); @@ -855,17 +915,13 @@ void Homography::Find(const vector& pw, const vector& from, vector& to) +void alvar::Homography::ProjectPoints(const vector< alvar::PointDouble>& from, vector< alvar::PointDouble>& to) { int size = (int)from.size(); @@ -888,7 +944,7 @@ void Homography::ProjectPoints(const vector& from, vector& from, vectorcaptureImage(); - if (frame) { - default_videocallback(frame); + cv::Mat frame = cv::cvarrToMat(cap->captureImage()); + if (!frame.empty()) { + default_videocallback(&frame); if (wintitle.size() > 0) { - cvShowImage(wintitle.c_str(), frame); + cv::imshow(wintitle.c_str(), frame); } } } int key; #ifdef WIN32 - if( (key = cvWaitKey(1)) >= 0 ) { + if( (key = cv::waitKey(1)) >= 0 ) { #else - if( (key = cvWaitKey(20)) >= 0 ) { + if( (key = cv::waitKey(20)) >= 0 ) { #endif if (keycallback) { key = keycallback(key); @@ -80,7 +80,7 @@ void CvTestbed::WaitKeys() { void CvTestbed::ShowVisibleImages() { for (size_t i=0; idepth; - if (channels == 0) channels = proto->nChannels; - IplImage *ipl= cvCreateImage(cvSize(proto->width, proto->height), depth, channels); +cv::Mat *CvTestbed::CreateImageWithProto(const char *title, cv::Mat *proto, int depth /* =0 */, int channels /* =0 */) { + cv::Mat *ipl= cv::Mat::zeros(cv::Size(proto->cols, proto->rows), proto.type); if (!ipl) return NULL; ipl->origin = proto->origin; SetImage(title, ipl, true); return ipl; } -IplImage *CvTestbed::GetImage(size_t index) { +cv::Mat *CvTestbed::GetImage(size_t index) { if (index < 0) return NULL; if (index >= images.size()) return NULL; - return images[index].ipl; + return images[index]; } size_t CvTestbed::GetImageIndex(const char *title) { @@ -173,7 +171,7 @@ size_t CvTestbed::GetImageIndex(const char *title) { return (size_t)-1; } -IplImage *CvTestbed::GetImage(const char *title) { +cv::Mat *CvTestbed::GetImage(const char *title) { return GetImage(GetImageIndex(title)); } @@ -181,12 +179,12 @@ bool CvTestbed::ToggleImageVisible(size_t index, int flags) { if (index >= images.size()) return false; if (images[index].visible == false) { images[index].visible=true; - cvNamedWindow(images[index].title.c_str(), flags); + cv::namedWindow(images[index].title.c_str(), flags); return true; } else { images[index].visible=false; - cvDestroyWindow(images[index].title.c_str()); + cv::destroyWindow(images[index].title.c_str()); return false; } } diff --git a/ar_track_alvar/src/Draw.cpp 
b/ar_track_alvar/src/Draw.cpp index 4a3e891..50b61fa 100644 --- a/ar_track_alvar/src/Draw.cpp +++ b/ar_track_alvar/src/Draw.cpp @@ -29,74 +29,76 @@ using namespace std; namespace alvar { using namespace std; -void DrawPoints(IplImage *image, const vector& points, CvScalar color) +void DrawPoints(cv::Mat *image, const vector& points, cv::Scalar color) { for(unsigned i = 0; i < points.size(); ++i) - cvLine(image, cvPoint(points[i].x,points[i].y), cvPoint(points[i].x,points[i].y), color); + cv::line(*image, cv::Point(points[i].x,points[i].y), cv::Point(points[i].x,points[i].y), color); } -void DrawLine(IplImage* image, const Line line, CvScalar color) +void DrawLine(cv::Mat* image, const Line line, cv::Scalar color) { double len = 100; - CvPoint p1, p2; + cv::Point p1, p2; p1.x = int(line.c.x); p1.y = int(line.c.y); p2.x = int(line.c.x+line.s.x*len); p2.y = int(line.c.y+line.s.y*len); - cvLine(image, p1, p2, color); + cv::line(*image, p1, p2, color); p1.x = int(line.c.x); p1.y = int(line.c.y); p2.x = int(line.c.x-line.s.x*len); p2.y = int(line.c.y-line.s.y*len); - cvLine(image, p1, p2, color); + cv::line(*image, p1, p2, color); } -void DrawPoints(IplImage* image, const CvSeq* contour, CvScalar color) +void DrawPoints(cv::Mat* image, const CvSeq* contour, cv::Scalar color) { for(int i = 0; i < contour->total; ++i) { - CvPoint* pt = (CvPoint*)cvGetSeqElem( contour, i); - cvLine(image, cvPoint(pt->x, pt->y), cvPoint(pt->x, pt->y), color); + CvPoint* pt = (CvPoint*)cv::getSeqElem( contour, i); + cv::line(*image, cvPoint(pt->x, pt->y), cv::Point(pt->x, pt->y), color); } } -void DrawCircles(IplImage* image, const CvSeq* contour, int radius, CvScalar color) +void DrawCircles(cv::Mat* image, const CvSeq* contour, int radius, cv::Scalar color) { for(int i = 0; i < contour->total; ++i) { - CvPoint* pt = (CvPoint*)cvGetSeqElem( contour, i); - cvCircle(image, cvPoint(pt->x, pt->y), radius, color); + CvPoint* pt = (CvPoint*)cv::getSeqElem( contour, i); + cv::circle(*image, cvPoint(pt->x, pt->y), radius, color); } } -void DrawLines(IplImage* image, const CvSeq* contour, CvScalar color) +void DrawLines(cv::Mat* image, const CvSeq* contour, cv::Scalar color) { if(contour->total >= 2) { for(int i = 0; i < contour->total; ++i) { - CvPoint* pt1 = (CvPoint*)cvGetSeqElem( contour, i); - CvPoint* pt2 = (CvPoint*)cvGetSeqElem( contour, (i+1)%(contour->total)); - cvLine(image, cvPoint(pt1->x, pt1->y), cvPoint(pt2->x, pt2->y), color); + cv::Point* pt1 = (cv::Point*)cv::getSeqElem( contour, i); + cv::Point* pt2 = (cv::Point*)cv::getSeqElem( contour, (i+1)%(contour->total)); + cv::line(*image, cv::Point(pt1->x, pt1->y), cv::Point(pt2->x, pt2->y), color); } } } -void DrawCVEllipse(IplImage* image, CvBox2D& ellipse, CvScalar color, bool fill/*=false*/, double par) +void DrawCVEllipse(cv::Mat* image, CvBox2D& ellipse, cv::Scalar color, bool fill/*=false*/, double par) { - CvPoint center; + cv::Point center; center.x = static_cast(ellipse.center.x); center.y = static_cast(ellipse.center.y); int type = 1; if(fill) - type = CV_FILLED; + type = cv::FILLED; //cout<(par+ellipse.size.width/2), static_cast(par+ellipse.size.height/2)), -ellipse.angle, 0, 360, color, type); + cv::ellipse(*image, center, cv::Size(static_cast(par+ellipse.size.width/2), static_cast(par+ellipse.size.height/2)), -ellipse.angle, 0, 360, color, type); } -void BuildHideTexture(IplImage *image, IplImage *hide_texture, +void BuildHideTexture(CvMat *image, CvMat *hide_texture, Camera *cam, double gl_modelview[16], PointDouble topleft, PointDouble botright) { 
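Two compile hazards around here: cv::getSeqElem does not exist (cvGetSeqElem is C-API only, and CvSeq is gone from the C++ interface), and in Camera.cpp above, commenting out the second ProjectPoint overload left its closing brace behind, while the `alvar::` qualifiers on Homography's member definitions are redundant inside `namespace alvar`. Since OpenCV 3/4 contour code hands out std::vector<cv::Point>, the idiomatic port of these helpers drops CvSeq; a sketch for DrawLines:

    #include <vector>
    #include <opencv2/imgproc.hpp>

    void DrawLines(cv::Mat & image, const std::vector<cv::Point> & contour,
                   const cv::Scalar & color)
    {
      // Closed polyline through the contour points (wraps last -> first).
      if (contour.size() >= 2)
        cv::polylines(image, contour, /*isClosed=*/true, color);
    }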
- assert(image->origin == 0); // Currently only top-left origin supported + + + //assert(image->origin == 0); // Currently only top-left origin supported double kx=1.0; double ky=1.0; @@ -210,16 +212,18 @@ void BuildHideTexture(IplImage *image, IplImage *hide_texture, } } -void DrawTexture(IplImage *image, IplImage *texture, +void DrawTexture(cv::Mat *image, cv::Mat *texture, Camera *cam, double gl_modelview[16], PointDouble topleft, PointDouble botright) { - assert(image->origin == 0); // Currently only top-left origin supported + //assert(image->origin == 0); // Currently only top-left origin supported double width = abs(botright.x - topleft.x); double height = abs(botright.y - topleft.y); double objx = width/2; double objy = height/2; + + // Project corners double points3d[4][3] = { -objx, -objy, 0, @@ -235,45 +239,55 @@ void DrawTexture(IplImage *image, IplImage *texture, // Warp texture and mask using the perspective that is based on the corners double map[9]; - CvMat map_mat = cvMat(3, 3, CV_64F, map); - CvPoint2D32f src[4] = { + cv::Mat map_mat = cv::Mat(3, 3, CV_64F, map); + cv::Point2f src[4] = { { 0, 0 }, - { 0, float(texture->height-1) }, - { float(texture->width-1), float(texture->height-1) }, - { float(texture->width-1), 0 }, + { 0, float(texture->cols-1) }, + { float(texture->rows-1), float(texture->cols-1) }, + { float(texture->rows-1), 0 }, }; - CvPoint2D32f dst[4] = { + cv::Point2f dst[4] = { { float(points2d[0][0]), float(points2d[0][1]) }, { float(points2d[1][0]), float(points2d[1][1]) }, { float(points2d[2][0]), float(points2d[2][1]) }, { float(points2d[3][0]), float(points2d[3][1]) }, }; - cvGetPerspectiveTransform(src, dst, &map_mat); - IplImage *img = cvCloneImage(image); - IplImage *img2 = cvCloneImage(image); - IplImage *mask = cvCreateImage(cvSize(image->width, image->height), 8, 1); - IplImage *mask2 = cvCreateImage(cvSize(image->width, image->height), 8, 1); - cvZero(img); - cvZero(img2); - cvZero(mask); - cvZero(mask2); - for (int j=0; jheight; j++) { //ttesis: why must we copy the texture first? - for (int i=0; iwidth; i++) { - CvScalar s = cvGet2D(texture, j, i); - cvSet2D(img, j, i, s); - if ((i>0) && (j>0) && (i<(texture->width-1)) && (j<(texture->height-1))) - cvSet2D(mask, j, i, cvScalar(1)); //ttesis: why are edges not included? + + map_mat = cv::getPerspectiveTransform(src, dst); + cv::Mat img = image->clone(); + cv::Mat img2 = image->clone(); + cv::Mat mask = cv::Mat::zeros(image->rows, image->cols, CV_8U); + cv::Mat mask2 = cv::Mat::zeros(image->rows, image->cols, CV_8U); + // cvZero(img); + // cvZero(img2); + // cvZero(mask); + // cvZero(mask2); + + CvMat texture2 = cvMat(*texture); + CvMat mask3 = cvMat(mask); + CvMat img3 = cvMat(img); + for (int j=0; jcols; j++) { //ttesis: why must we copy the texture first? + for (int i=0; irows; i++) { + CvScalar s = cvGet2D(&texture2, j, i); + cvSet2D(&img3, j, i, s); + // img.at(j,i) = s; + // //image.at + if ((i>0) && (j>0) && (i<(texture->rows-1)) && (j<(texture->cols-1))) + cvSet2D(&mask3, j, i, cvScalar(1)); //ttesis: why are edges not included? 
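// Porting note (sketch): once the CvMat shims go, this per-pixel
// cvGet2D/cvSet2D copy collapses to whole-Mat operations, e.g.
//   texture->copyTo(img(cv::Rect(0, 0, texture->cols, texture->rows)));
//   mask(cv::Rect(1, 1, texture->cols - 2, texture->rows - 2)) = 1;
// which keeps the interior-only mask and sidesteps the rows/cols
// swaps that are easy to introduce in the loop bounds above.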
} } - cvWarpPerspective(img, img2, &map_mat); - cvWarpPerspective(mask, mask2, &map_mat, 0); + cv::warpPerspective(cv::cvarrToMat(&img3), img2, map_mat,cv::cvarrToMat(&img3).size()); + cv::warpPerspective(cv::cvarrToMat(&mask3), mask2, map_mat,cv::cvarrToMat(&mask3).size());//, cv::InterpolationFlags::INTER_LINEAR , cv::BORDER_CONSTANT, cv::Scalar(0)); - cvCopy(img2, image, mask2); - cvReleaseImage(&img); - cvReleaseImage(&img2); - cvReleaseImage(&mask); - cvReleaseImage(&mask2); + img2.copyTo(*image,mask2); + + //cvCopy(CvMat(img2), image, mask2); + + // cvReleaseImage(&img); + // cvReleaseImage(&img2); + // cvReleaseImage(&mask); + // cvReleaseImage(&mask2); } } // namespace alvar diff --git a/ar_track_alvar/src/Kalman.cpp b/ar_track_alvar/src/Kalman.cpp index 33656c6..381180c 100644 --- a/ar_track_alvar/src/Kalman.cpp +++ b/ar_track_alvar/src/Kalman.cpp @@ -23,9 +23,7 @@ #include #include // for std::max -#include "cxcore.h" -#include "cv.h" -#include "highgui.h" +#include #include "ar_track_alvar/Kalman.h" #include "ar_track_alvar/Util.h" #include "ar_track_alvar/Alvar.h" @@ -295,7 +293,13 @@ KalmanEkf::~KalmanEkf() { } void KalmanVisualize::img_matrix(CvMat *mat, int top, int left) { - cvSetImageROI(img, cvRect(top, left, mat->cols, mat->rows)); + + cv::Rect roi(top, left, mat->cols, mat->rows); + cv::Mat im = img->clone(); + cv::Mat image_roi = im(roi); + CvMat img2 = cvMat(image_roi); + + //cvSetImageROI(img, cvRect(top, left, mat->cols, mat->rows)); for (int j=0; jrows; j++) { for (int i=0; icols; i++) { double d = cvGet2D(mat, j, i).val[0]; @@ -316,13 +320,13 @@ void KalmanVisualize::img_matrix(CvMat *mat, int top, int left) { c1 = 255; c2 = 255; c3 = 255; } if (d < 0) { - cvSet2D(img, j, i, cvScalar(c3, c2, c1)); // BRG + cvSet2D(&img2, j, i, cvScalar(c3, c2, c1)); // BRG } else { - cvSet2D(img, j, i, cvScalar(c2, c1, c3)); // BGR + cvSet2D(&img2, j, i, cvScalar(c2, c1, c3)); // BGR } } } - cvResetImageROI(img); + //cvResetImageROI(img); } void KalmanVisualize::Init() { @@ -330,26 +334,49 @@ void KalmanVisualize::Init() { m = sensor->get_m(); int img_width = std::max(3+n+3+n+5+m+5, 1+n+1+n+1+n+1+m+1+n+1); int img_height = 1+n+1+std::max(n, m+1+m)+1; - img = cvCreateImage(cvSize(img_width, img_height), IPL_DEPTH_8U, 3); - cvSet(img, cvScalar(64,64,64)); - img_legend = cvLoadImage("Legend.png"); + cv::Mat img_r = cv::Mat(cv::Size(img_width, img_height),CV_8UC3); + img_r = cv::Scalar(64,64,64); + img = &img_r; + //cvSet(img, ); + cv::Mat load_im = cv::imread("Legend.png"); + img_legend = &load_im; + CvMat img_legend2 = cvMat(load_im); + if (img_legend) { for (img_scale=1; img_scale<50; img_scale++) { - if (img_scale*img_width > img_legend->width) { + if (img_scale*img_width > img_legend2.width) { break; } } - img_show = cvCreateImage(cvSize(img_width*img_scale, img_legend->height + img_height*img_scale), IPL_DEPTH_8U, 3); - cvSet(img_show, cvScalar(64,64,64)); - cvSetImageROI(img_show, cvRect(0, 0, img_legend->width, img_legend->height)); - cvCopy(img_legend, img_show); - cvResetImageROI(img_show); - cvNamedWindow("KalmanVisualize"); + + + //img_show = cvCreateImage(cvSize(img_width*img_scale, img_legend2.height + img_height*img_scale), IPL_DEPTH_8U, 3); + + cv::Mat img_temp = cv::Mat(cv::Size(img_width*img_scale, img_legend2.height + img_height*img_scale),CV_8UC3); + img_show = &img_temp; + + CvMat img_show2 = cvMat(img_temp); + cvSet(&img_show2, cvScalar(64,64,64)); + + + cv::Rect roi2(0, 0, img_legend2.width, img_legend2.height); + cv::Mat im2 = img_show->clone(); + cv::Mat 
+		CvMat img3 = cvMat(image_roi2);
+
+		//cvSetImageROI(&img_show2, cvRect(0, 0, img_legend2.width, img_legend2.height));
+
+
+		cvCopy(&img_legend2, &img3);
+		//cvResetImageROI(img_show);
+		cv::namedWindow("KalmanVisualize");
 	} else {
 		img_scale = 1;
-		img_show = cvCreateImage(cvSize(img_width*img_scale, img_height*img_scale), IPL_DEPTH_8U, 3);
-		cvSet(img_show, cvScalar(64,64,64));
-		cvNamedWindow("KalmanVisualize",0);
+		cv::Mat img_temp = cv::Mat(cv::Size(img_width*img_scale, img_legend2.height + img_height*img_scale),CV_8UC3);
+		img_show = &img_temp;
+		CvMat img_show2 = cvMat(img_temp);
+		cvSet(&img_show2, cvScalar(64,64,64));
+		cv::namedWindow("KalmanVisualize",0);
 	}
 }
@@ -395,7 +422,7 @@ KalmanVisualize::KalmanVisualize(KalmanCore *_kalman, KalmanSensorCore *_sensor)
 }
 
 KalmanVisualize::~KalmanVisualize() {
-	cvReleaseImage(&img);
+	// cvReleaseImage(&img);
 }
 
 void KalmanVisualize::update_pre() {
@@ -421,20 +448,29 @@ void KalmanVisualize::update_post() {
 		img_matrix(kalman_ext->Q, 2+n, y); // n
 		img_matrix(kalman_ext->P_pred, 3+n+n, y); // n
 		img_matrix(sensor_ext->R, 4+n+n+n, y); // m
-		img_matrix(kalman_ext->P, img->width - 1 - n, y); // n
+		img_matrix(kalman_ext->P, img->rows - 1 - n, y); // n
 	}
 	if (img_legend) {
-		cvSetImageROI(img_show, cvRect(0, img_legend->height, img->width * img_scale, img->height * img_scale));
-		cvResize(img, img_show, CV_INTER_NN);
-		cvResetImageROI(img_show);
+
+		cv::Rect roi(0, img_legend->cols, img->rows * img_scale, img->cols * img_scale);
+		cv::Mat im2 = img_show->clone();
+		cv::Mat image_roi2 = im2(roi);
+		CvMat img3 = cvMat(image_roi2);
+
+		//cvSetImageROI(img_show, cvRect(0, img_legend->height, img->width * img_scale, img->height * img_scale));
+		//cvResize(img, img_show, CV_INTER_NN);
+		//cvResetImageROI(img_show);
+
+
 	} else {
-		cvResize(img, img_show, CV_INTER_NN);
+		//cvResize(img, img_show, CV_INTER_NN);
 	}
 }
 
 void KalmanVisualize::show() {
 	//out_matrix(sensor->K, "K");
-	cvShowImage("KalmanVisualize", img_show);
+	cv::imshow("KalmanVisualize", *img_show);
 }
 
 } // namespace alvar
diff --git a/ar_track_alvar/src/Line.cpp b/ar_track_alvar/src/Line.cpp
index 5d3be21..5275468 100644
--- a/ar_track_alvar/src/Line.cpp
+++ b/ar_track_alvar/src/Line.cpp
@@ -22,6 +22,7 @@
 */
 
 #include "ar_track_alvar/Line.h"
+#include
 
 using namespace std;
 
@@ -44,7 +45,7 @@ void FitLines(vector<Line> &lines)
 
 int FitLines(vector<Line> &lines,
 			 const vector<int>& corners,
 			 const vector<PointInt>& edge,
-			 IplImage *grey /*=0*/) // grey image for future sub pixel accuracy
+			 cv::Mat *grey /*=0*/) // grey image for future sub pixel accuracy
 {
 	lines.clear();
 	for(unsigned j = 0; j < corners.size(); ++j)
@@ -71,7 +72,7 @@ int FitLines(vector<Line> &lines,
 		double* data = new double[2*len];
 
 		// OpenCV routine...
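		// A standalone sketch of the cv::fitLine API this hunk migrates to (an
		// illustration, not part of the patch; cv::DIST_L2 is the C++ spelling of
		// the legacy CV_DIST_L2 constant, and the result packs (vx, vy, x0, y0)):
		//
		//   std::vector<cv::Point2f> pts = {{0.f, 0.f}, {1.f, 1.1f}, {2.f, 1.9f}};
		//   cv::Vec4f line;
		//   cv::fitLine(pts, line, cv::DIST_L2, 0, 0.01, 0.01);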
-		CvMat* line_data = cvCreateMat(1, len, CV_32FC2);
+		cv::Mat line_data = cv::Mat(1, len, CV_32FC2);
 		for(int i = 0; i < len; ++i)
 		{
 			ind = i + start;
@@ -80,15 +81,16 @@ int FitLines(vector<Line> &lines,
 			double px = double(edge[ind].x);
 			double py = double(edge[ind].y);
-			CV_MAT_ELEM(*line_data, CvPoint2D32f, 0, i) = cvPoint2D32f(px, py);
+			line_data.at<cv::Point2f>(0,i) = cv::Point2f(px, py);
 		}
 
-		float params[4] = {0};
-		cvFitLine(line_data, CV_DIST_L2, 0, 0.01, 0.01, params);
+		//float params[4] = {0};
+		cv::Vec4f params;
+		cv::fitLine(line_data, params, CV_DIST_L2, 0, 0.01, 0.01);
 		lines.push_back(Line(params));
 
 		delete [] data;
-		cvReleaseMat(&line_data);
+		//cvReleaseMat(&line_data);
 	}
 
diff --git a/ar_track_alvar/src/Marker.cpp b/ar_track_alvar/src/Marker.cpp
index fdaf503..1bf6f38 100644
--- a/ar_track_alvar/src/Marker.cpp
+++ b/ar_track_alvar/src/Marker.cpp
@@ -23,7 +23,8 @@
 
 #include "ar_track_alvar/Alvar.h"
 #include "ar_track_alvar/Marker.h"
-#include "highgui.h"
+#include
+#include "opencv2/core/types_c.h"
 
 template class ALVAR_EXPORT alvar::MarkerIteratorImpl<alvar::MarkerData>;
 template class ALVAR_EXPORT alvar::MarkerIteratorImpl<alvar::MarkerArtoolkit>;
diff --git a/ar_track_alvar/src/Optimization.cpp b/ar_track_alvar/src/Optimization.cpp
index ea60c2a..a749524 100644
--- a/ar_track_alvar/src/Optimization.cpp
+++ b/ar_track_alvar/src/Optimization.cpp
@@ -24,7 +24,10 @@
 #include "ar_track_alvar/Alvar.h"
 #include "ar_track_alvar/Optimization.h"
 #include "time.h"
-#include "highgui.h"
+#include
+#include
+#include
+#include
 #include
 
 using namespace std;
diff --git a/ar_track_alvar/src/Pose.cpp b/ar_track_alvar/src/Pose.cpp
index 6809e57..9e726db 100644
--- a/ar_track_alvar/src/Pose.cpp
+++ b/ar_track_alvar/src/Pose.cpp
@@ -22,6 +22,7 @@
 */
 
 #include "ar_track_alvar/Pose.h"
+#include "opencv2/core/types_c.h"
 
 using namespace std;
 
diff --git a/ar_track_alvar/src/Rotation.cpp b/ar_track_alvar/src/Rotation.cpp
index bc96009..583305c 100644
--- a/ar_track_alvar/src/Rotation.cpp
+++ b/ar_track_alvar/src/Rotation.cpp
@@ -101,7 +101,7 @@ void Rotation::Mat9ToRod(double *mat, double *rod)
 	CvMat mat_m, rod_m;
 	cvInitMatHeader(&mat_m, 3, 3, CV_64F, mat);
 	cvInitMatHeader(&rod_m, 3, 1, CV_64F, rod);
-	cvRodrigues2(&mat_m, &rod_m);
+	cv::Rodrigues(cv::cvarrToMat(&mat_m), cv::cvarrToMat(&rod_m));
 }
 
 void Rotation::RodToMat9(double *rod, double *mat)
@@ -109,7 +109,7 @@
 {
 	CvMat mat_m, rod_m;
 	cvInitMatHeader(&mat_m, 3, 3, CV_64F, mat);
 	cvInitMatHeader(&rod_m, 3, 1, CV_64F, rod);
-	cvRodrigues2(&rod_m, &mat_m, 0);
+	cv::Rodrigues(cv::cvarrToMat(&rod_m), cv::cvarrToMat(&mat_m));
 }
 
 void Rotation::QuatInv(const double *q, double *qi)
diff --git a/ar_track_alvar/src/Util.cpp b/ar_track_alvar/src/Util.cpp
index 5a7e6f7..dfcd54a 100644
--- a/ar_track_alvar/src/Util.cpp
+++ b/ar_track_alvar/src/Util.cpp
@@ -114,7 +114,7 @@ double polyLinePointDist(CvPoint *PointList, int nPnts,CvPoint *C, int *index, i
 //ttesis
 
-void FitCVEllipse(const vector<PointDouble> &points, CvBox2D& ellipse_box)
+void FitCVEllipse(const vector<PointDouble> &points, cv::RotatedRect& ellipse_box)
 {
 	if(points.size() < 8) return;
 
@@ -123,8 +123,8 @@ void FitCVEllipse(const vector<PointDouble> &points, CvBox2D& ellipse_box)
 	{
 		CV_MAT_ELEM(*vector, CvPoint2D64f, 0, i) = (CvPoint2D64f)points[i];
 	}
-	ellipse_box = cvFitEllipse2(vector);
-	cvReleaseMat(&vector);
+	ellipse_box = cv::fitEllipse(cv::cvarrToMat(&vector));
+	//cvReleaseMat(&vector);
 }
 
 int exp_filt2(vector<double> &v, vector<double> &ret, bool clamp)
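// A standalone sketch of the cv::fitEllipse call FitCVEllipse now wraps (an
// illustration, not part of the patch; the C++ API accepts the point vector
// directly, so no CvMat staging buffer is required, and at least 5
// non-degenerate points are needed):
//
//   std::vector<cv::Point2f> pts = {{3.f, 0.f}, {0.f, 2.f}, {-3.f, 0.f},
//                                   {0.f, -2.f}, {2.6f, 1.f}, {-2.6f, -1.f},
//                                   {2.6f, -1.f}, {-2.6f, 1.f}};
//   cv::RotatedRect box = cv::fitEllipse(pts);   // centre, size and angle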
diff --git a/ar_track_alvar/src/kinect_filtering.cpp b/ar_track_alvar/src/kinect_filtering.cpp
index ec4773c..c7e3f20 100644
--- a/ar_track_alvar/src/kinect_filtering.cpp
+++ b/ar_track_alvar/src/kinect_filtering.cpp
@@ -31,11 +31,12 @@
 
-//#include <ros/ros.h>
+//#include "rclcpp/rclcpp.hpp"
 #include
 #include
-#include
-#include
+// #include
+#include
+#include
 
 namespace ar_track_alvar
 {
@@ -78,14 +79,14 @@
   ARCloud::Ptr filterCloud (const ARCloud& cloud, const vector<cv::Point, Eigen::aligned_allocator<cv::Point> >& pixels)
   {
     ARCloud::Ptr out(new ARCloud());
-    //ROS_INFO("  Filtering %zu pixels", pixels.size());
+    //RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "  Filtering %zu pixels", pixels.size());
     //for (const cv::Point& p : pixels)
     for(size_t i=0; i<pixels.size(); i++)
     {
      const cv::Point& p = pixels[i];
      const ARPoint& pt = cloud(p.x, p.y);
      if (pt.x == pt.x && pt.y == pt.y && pt.z == pt.z) // skip NaN points
       out->points.push_back(pt);
@@ -93,9 +94,9 @@
     return out;
   }
 
-  gm::Point centroid (const ARCloud& points)
+  gm::msg::Point centroid (const ARCloud& points)
   {
-    gm::Point sum;
+    gm::msg::Point sum;
     sum.x = 0;
     sum.y = 0;
     sum.z = 0;
@@ -107,7 +108,7 @@
       sum.z += points[i].z;
     }
 
-    gm::Point center;
+    gm::msg::Point center;
     const size_t n = points.size();
     center.x = sum.x/n;
     center.y = sum.y/n;
@@ -115,11 +116,11 @@
     return center;
   }
 
-  // Helper function to construct a geometry_msgs::Quaternion
+  // Helper function to construct a geometry_msgs::msg::Quaternion
   inline
-  gm::Quaternion makeQuaternion (double x, double y, double z, double w)
+  gm::msg::Quaternion makeQuaternion (double x, double y, double z, double w)
   {
-    gm::Quaternion q;
+    gm::msg::Quaternion q;
     q.x = x;
     q.y = y;
     q.z = z;
@@ -145,14 +146,14 @@
   }
 
   // Project point onto plane
-  tf::Vector3 project (const ARPoint& p, const double a, const double b,
+  tf2::Vector3 project (const ARPoint& p, const double a, const double b,
                       const double c, const double d)
   {
     const double t = a*p.x + b*p.y + c*p.z + d;
-    return tf::Vector3(p.x-t*a, p.y-t*b, p.z-t*c);
+    return tf2::Vector3(p.x-t*a, p.y-t*b, p.z-t*c);
   }
 
-  ostream& operator<< (ostream& str, const tf::Matrix3x3& m)
+  ostream& operator<< (ostream& str, const tf2::Matrix3x3& m)
   {
     str << "[" << m[0][0] << ", " << m[0][1] << ", " << m[0][2] << "; "
        << m[1][0] << ", " << m[1][1] << ", " << m[1][2] << "; "
@@ -160,14 +161,14 @@
     return str;
   }
 
-  ostream& operator<< (ostream& str, const tf::Quaternion& q)
+  ostream& operator<< (ostream& str, const tf2::Quaternion& q)
   {
     str << "[(" << q.x() << ", " << q.y() << ", " << q.z() << "), "
        << q.w() << "]";
     return str;
   }
 
-  ostream& operator<< (ostream& str, const tf::Vector3& v)
+  ostream& operator<< (ostream& str, const tf2::Vector3& v)
   {
     str << "(" << v.x() << ", " << v.y() << ", " << v.z() << ")";
     return str;
@@ -176,30 +177,30 @@
   int extractFrame (const pcl::ModelCoefficients& coeffs,
                     const ARPoint& p1, const ARPoint& p2,
                     const ARPoint& p3, const ARPoint& p4,
-                    tf::Matrix3x3 &retmat)
+                    tf2::Matrix3x3 &retmat)
   {
     // Get plane coeffs and project points onto the plane
     double a=0, b=0, c=0, d=0;
     if(getCoeffs(coeffs, &a, &b, &c, &d) < 0)
       return -1;
 
-    const tf::Vector3 q1 = project(p1, a, b, c, d);
-    const tf::Vector3 q2 = project(p2, a, b, c, d);
-    const tf::Vector3 q3 = project(p3, a, b, c, d);
-    const tf::Vector3 q4 = project(p4, a, b, c, d);
+    const tf2::Vector3 q1 = project(p1, a, b, c, d);
+    const tf2::Vector3 q2 = project(p2, a, b, c, d);
+    const tf2::Vector3 q3 = project(p3, a, b, c, d);
+    const tf2::Vector3 q4 = project(p4, a, b, c, d);
 
     // Make sure points aren't the same so things are well-defined
    if((q2-q1).length() < 1e-3)
       return -1;
 
    // (inverse) matrix with the given properties
-   const
tf::Vector3 v = (q2-q1).normalized(); - const tf::Vector3 n(a, b, c); - const tf::Vector3 w = -v.cross(n); - tf::Matrix3x3 m(v[0], v[1], v[2], w[0], w[1], w[2], n[0], n[1], n[2]); + const tf2::Vector3 v = (q2-q1).normalized(); + const tf2::Vector3 n(a, b, c); + const tf2::Vector3 w = -v.cross(n); + tf2::Matrix3x3 m(v[0], v[1], v[2], w[0], w[1], w[2], n[0], n[1], n[2]); // Possibly flip things based on third point - const tf::Vector3 diff = (q4-q3).normalized(); + const tf2::Vector3 diff = (q4-q3).normalized(); //ROS_INFO_STREAM("w = " << w << " and d = " << diff); if (w.dot(diff)<0) { @@ -216,12 +217,12 @@ namespace ar_track_alvar } - int getQuaternion (const tf::Matrix3x3& m, tf::Quaternion &retQ) + int getQuaternion (const tf2::Matrix3x3& m, tf2::Quaternion &retQ) { if(m.determinant() <= 0) return -1; - //tfScalar y=0, p=0, r=0; + //tf2Scalar y=0, p=0, r=0; //m.getEulerZYX(y, p, r); //retQ.setEulerZYX(y, p, r); @@ -235,11 +236,11 @@ namespace ar_track_alvar Eigen::Quaternion eig_quat(eig_m); // Translate back to bullet - tfScalar ex = eig_quat.x(); - tfScalar ey = eig_quat.y(); - tfScalar ez = eig_quat.z(); - tfScalar ew = eig_quat.w(); - tf::Quaternion quat(ex,ey,ez,ew); + tf2Scalar ex = eig_quat.x(); + tf2Scalar ey = eig_quat.y(); + tf2Scalar ez = eig_quat.z(); + tf2Scalar ew = eig_quat.w(); + tf2::Quaternion quat(ex,ey,ez,ew); retQ = quat.normalized(); return 0; @@ -249,12 +250,12 @@ namespace ar_track_alvar int extractOrientation (const pcl::ModelCoefficients& coeffs, const ARPoint& p1, const ARPoint& p2, const ARPoint& p3, const ARPoint& p4, - gm::Quaternion &retQ) + gm::msg::Quaternion &retQ) { - tf::Matrix3x3 m; + tf2::Matrix3x3 m; if(extractFrame(coeffs, p1, p2, p3, p4, m) < 0) return -1; - tf::Quaternion q; + tf2::Quaternion q; if(getQuaternion(m,q) < 0) return -1; retQ.x = q.x(); diff --git a/ar_track_alvar/test/test_ar.py b/ar_track_alvar/test/test_ar.py index 287ba86..9aa3838 100755 --- a/ar_track_alvar/test/test_ar.py +++ b/ar_track_alvar/test/test_ar.py @@ -49,7 +49,8 @@ class TestArAlvarRos(unittest.TestCase): ''' def setUp(self): - rospy.init_node('test_armarker_ros_detect') + rclpy.init() + node = rclpy.create_node('test_armarker_ros_detect') self.tflistener = tf.TransformListener() def tearDown(self): diff --git a/ar_track_alvar/test/test_kinect_filtering.cpp b/ar_track_alvar/test/test_kinect_filtering.cpp index a7bbf4e..1baeb36 100644 --- a/ar_track_alvar/test/test_kinect_filtering.cpp +++ b/ar_track_alvar/test/test_kinect_filtering.cpp @@ -95,7 +95,7 @@ int main (int argc, char** argv) a::ARCloud::ConstPtr cloud = generateCloud(px, py, pz, vx, vy, vz, wx, wy, wz); const size_t n = cloud->size(); - ROS_INFO("Generated cloud with %zu points such as (%.4f, %.4f, %.4f)" + RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "Generated cloud with %zu points such as (%.4f, %.4f, %.4f)" " and (%.4f, %.4f, %.4f)", n, (*cloud)[0].x, (*cloud)[0].y, (*cloud)[0].z, (*cloud)[n-1].x, (*cloud)[n-1].y, (*cloud)[n-1].z); @@ -105,11 +105,11 @@ int main (int argc, char** argv) a::ARPoint p1 = (*cloud)[i1]; a::ARPoint p2 = (*cloud)[i2]; a::ARPoint p3 = (*cloud)[i3]; - ROS_INFO("Points are (%.4f, %.4f, %.4f) and (%.4f, %.4f, %.4f)", + RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "Points are (%.4f, %.4f, %.4f) and (%.4f, %.4f, %.4f)", p1.x, p1.y, p1.z, p2.x, p2.y, p2.z); a::PlaneFitResult res = a::fitPlane(cloud); - ROS_INFO("Plane equation is %.3fx + %.3fy + %.3fz + %.3f = 0", + RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "Plane equation is %.3fx + %.3fy + %.3fz + %.3f = 0", 
res.coeffs.values[0], res.coeffs.values[1],
 res.coeffs.values[2], res.coeffs.values[3]);
 
diff --git a/ar_track_alvar/test/test_points.cpp b/ar_track_alvar/test/test_points.cpp
index 6e1df52..ed725c9 100644
--- a/ar_track_alvar/test/test_points.cpp
+++ b/ar_track_alvar/test/test_points.cpp
@@ -78,7 +78,8 @@ a::ARCloud::Ptr generateCloud(const double px, const double py, const double pz,
 
 int main (int argc, char** argv)
 {
-  ros::init(argc, argv, "test_points");
+  rclcpp::init(argc, argv);
+  auto node = rclcpp::Node::make_shared("test_points");
   ifstream f("points");
   a::ARCloud::Ptr cloud(new a::ARCloud());
   while (!f.eof())
@@ -87,7 +88,7 @@ int main (int argc, char** argv)
     f >> pt.x >> pt.y >> pt.z;
     cloud->points.push_back(pt);
   }
-  ROS_INFO("Cloud has %zu points such as (%.2f, %.2f, %.2f)",
+  RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "Cloud has %zu points such as (%.2f, %.2f, %.2f)",
            cloud->points.size(), cloud->points[0].x, cloud->points[0].y,
            cloud->points[0].z);
   a::ARPoint p1, p2, p3;
@@ -102,7 +103,7 @@ int main (int argc, char** argv)
   p3.z = 88;
 
   a::PlaneFitResult res = a::fitPlane(cloud);
-  ROS_INFO("Plane equation is %.3fx + %.3fy + %.3fz + %.3f = 0",
+  RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "Plane equation is %.3fx + %.3fy + %.3fz + %.3f = 0",
            res.coeffs.values[0], res.coeffs.values[1], res.coeffs.values[2],
            res.coeffs.values[3]);
 
diff --git a/ar_track_alvar_msgs/CMakeLists.txt b/ar_track_alvar_msgs/CMakeLists.txt
index be775f6..50aa8cb 100644
--- a/ar_track_alvar_msgs/CMakeLists.txt
+++ b/ar_track_alvar_msgs/CMakeLists.txt
@@ -1,18 +1,27 @@
-cmake_minimum_required(VERSION 2.8.3)
+cmake_minimum_required(VERSION 3.5)
 project(ar_track_alvar_msgs)
 
-set(MSG_DEPS
-  std_msgs
-  geometry_msgs
-  )
+if(NOT WIN32)
+  if(NOT CMAKE_CXX_STANDARD)
+    set(CMAKE_CXX_STANDARD 14)
+  endif()
+  if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+    add_compile_options(-Wall -Wextra -Wpedantic)
+  endif()
+endif()
 
-find_package(catkin REQUIRED COMPONENTS
-  message_generation
-  ${MSG_DEPS}
-)
+find_package(ament_cmake REQUIRED)
+find_package(std_msgs REQUIRED)
+find_package(geometry_msgs REQUIRED)
+find_package(rosidl_default_generators REQUIRED)
 
-set(MSG_FILES AlvarMarker.msg AlvarMarkers.msg)
-add_message_files(DIRECTORY msg FILES ${MSG_FILES})
-generate_messages(DEPENDENCIES ${MSG_DEPS})
+rosidl_generate_interfaces(
+  ${PROJECT_NAME}
+  msg/AlvarMarkers.msg
+  msg/AlvarMarker.msg
+  DEPENDENCIES
+  geometry_msgs
+)
 
-catkin_package(CATKIN_DEPENDS message_runtime ${MSG_DEPS})
+ament_export_dependencies(rosidl_default_runtime)
+ament_package()
diff --git a/ar_track_alvar_msgs/msg/AlvarMarker.msg b/ar_track_alvar_msgs/msg/AlvarMarker.msg
index b1db8c6..e6e3efa 100644
--- a/ar_track_alvar_msgs/msg/AlvarMarker.msg
+++ b/ar_track_alvar_msgs/msg/AlvarMarker.msg
@@ -1,4 +1,4 @@
-Header header
+std_msgs/Header header
 uint32 id
 uint32 confidence
 geometry_msgs/PoseStamped pose
diff --git a/ar_track_alvar_msgs/msg/AlvarMarkers.msg b/ar_track_alvar_msgs/msg/AlvarMarkers.msg
index d820821..29b9fe6 100644
--- a/ar_track_alvar_msgs/msg/AlvarMarkers.msg
+++ b/ar_track_alvar_msgs/msg/AlvarMarkers.msg
@@ -1,2 +1,2 @@
-Header header
+std_msgs/Header header
 AlvarMarker[] markers
diff --git a/ar_track_alvar_msgs/package.xml b/ar_track_alvar_msgs/package.xml
index 22e5810..6d03b07 100644
--- a/ar_track_alvar_msgs/package.xml
+++ b/ar_track_alvar_msgs/package.xml
@@ -1,5 +1,8 @@
-<package>
+<?xml version="1.0"?>
+<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
+<package format="3">
 
   <name>ar_track_alvar_msgs</name>
   <version>0.7.1</version>
@@ -10,11 +13,19 @@
   <license>BSD</license>
   <url>http://ros.org/wiki/ar_track_alvar</url>
 
-  <buildtool_depend>catkin</buildtool_depend>
-  <build_depend>message_generation</build_depend>
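<!-- Dependency mapping in this hunk: catkin's buildtool role moves to ament_cmake,
     message_generation becomes the rosidl generator packages at build time, and
     message_runtime becomes rosidl_default_runtime at run time; the paired
     build/run depends on the message packages collapse into single format-3
     depend tags. -->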
-  <build_depend>geometry_msgs</build_depend>
-  <build_depend>std_msgs</build_depend>
-  <run_depend>message_runtime</run_depend>
-  <run_depend>geometry_msgs</run_depend>
-  <run_depend>std_msgs</run_depend>
+  <buildtool_depend>ament_cmake</buildtool_depend>
+  <buildtool_depend>rosidl_default_generators</buildtool_depend>
+  <depend>geometry_msgs</depend>
+  <depend>std_msgs</depend>
+
+  <exec_depend>rosidl_default_runtime</exec_depend>
+  <test_depend>ament_cmake_gtest</test_depend>
+  <test_depend>ament_lint_auto</test_depend>
+  <test_depend>ament_lint_common</test_depend>
+
+  <member_of_group>rosidl_interface_packages</member_of_group>
+
+  <export>
+    <build_type>ament_cmake</build_type>
+  </export>
 
 </package>

From ebb0cfa6e625b2b837f2cfd6719b698ab91eb7fc Mon Sep 17 00:00:00 2001
From: Patrick Musau
Date: Tue, 20 Jul 2021 12:04:18 -0500
Subject: [PATCH 02/27] don't add logs

---
 ar_track_alvar/.vscode/c_cpp_properties.json | 17 ++++++++++++
 ar_track_alvar/.vscode/settings.json         |  8 ++++++
 ar_track_alvar/nodes/log/COLCON_IGNORE       |  0
 ar_track_alvar/nodes/log/latest              |  1 -
 ar_track_alvar/nodes/log/latest_list         |  1 -
 .../list_2021-07-19_11-38-02/logger_all.log  | 26 ------------------
 6 files changed, 25 insertions(+), 28 deletions(-)
 create mode 100644 ar_track_alvar/.vscode/c_cpp_properties.json
 create mode 100644 ar_track_alvar/.vscode/settings.json
 delete mode 100644 ar_track_alvar/nodes/log/COLCON_IGNORE
 delete mode 120000 ar_track_alvar/nodes/log/latest
 delete mode 120000 ar_track_alvar/nodes/log/latest_list
 delete mode 100644 ar_track_alvar/nodes/log/list_2021-07-19_11-38-02/logger_all.log

diff --git a/ar_track_alvar/.vscode/c_cpp_properties.json b/ar_track_alvar/.vscode/c_cpp_properties.json
new file mode 100644
index 0000000..b8106d0
--- /dev/null
+++ b/ar_track_alvar/.vscode/c_cpp_properties.json
@@ -0,0 +1,17 @@
+{
+    "configurations": [
+        {
+            "browse": {
+                "databaseFilename": "",
+                "limitSymbolsToIncludedHeaders": true
+            },
+            "includePath": [
+                "/opt/ros/foxy/include/**",
+                "/home/musaup/Documents/Research/OSRF/ros2_porting/ar_track_alvar/ar_track_alvar/include/**",
+                "/usr/include/**"
+            ],
+            "name": "ROS"
+        }
+    ],
+    "version": 4
+}
\ No newline at end of file
diff --git a/ar_track_alvar/.vscode/settings.json b/ar_track_alvar/.vscode/settings.json
new file mode 100644
index 0000000..8f66d01
--- /dev/null
+++ b/ar_track_alvar/.vscode/settings.json
@@ -0,0 +1,8 @@
+{
+    "python.autoComplete.extraPaths": [
+        "/opt/ros/foxy/lib/python3.8/site-packages"
+    ],
+    "python.analysis.extraPaths": [
+        "/opt/ros/foxy/lib/python3.8/site-packages"
+    ]
+}
\ No newline at end of file
diff --git a/ar_track_alvar/nodes/log/COLCON_IGNORE b/ar_track_alvar/nodes/log/COLCON_IGNORE
deleted file mode 100644
index e69de29..0000000
diff --git a/ar_track_alvar/nodes/log/latest b/ar_track_alvar/nodes/log/latest
deleted file mode 120000
index 1715667..0000000
--- a/ar_track_alvar/nodes/log/latest
+++ /dev/null
@@ -1 +0,0 @@
-latest_list
\ No newline at end of file
diff --git a/ar_track_alvar/nodes/log/latest_list b/ar_track_alvar/nodes/log/latest_list
deleted file mode 120000
index 6017113..0000000
--- a/ar_track_alvar/nodes/log/latest_list
+++ /dev/null
@@ -1 +0,0 @@
-list_2021-07-19_11-38-02
\ No newline at end of file
diff --git a/ar_track_alvar/nodes/log/list_2021-07-19_11-38-02/logger_all.log b/ar_track_alvar/nodes/log/list_2021-07-19_11-38-02/logger_all.log
deleted file mode 100644
index b7e4e06..0000000
--- a/ar_track_alvar/nodes/log/list_2021-07-19_11-38-02/logger_all.log
+++ /dev/null
@@ -1,26 +0,0 @@
-[0.403s] DEBUG:colcon:Command line arguments: ['/usr/bin/colcon', 'list', '-p', '--base-paths', '/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes']
-[0.403s] DEBUG:colcon:Parsed command line arguments: Namespace(base_paths=['/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes'], build_base='build', ignore_user_meta=False, log_base=None, log_level=None,
main=>, metas=['./colcon.meta'], names_only=False, packages_above=None, packages_above_and_dependencies=None, packages_above_depth=None, packages_end=None, packages_ignore=None, packages_ignore_regex=None, packages_select=None, packages_select_build_failed=False, packages_select_by_dep=None, packages_select_regex=None, packages_select_test_failures=False, packages_skip=None, packages_skip_build_finished=False, packages_skip_by_dep=None, packages_skip_regex=None, packages_skip_test_passed=False, packages_skip_up_to=None, packages_start=None, packages_up_to=None, packages_up_to_regex=None, paths=None, paths_only=True, topological_graph=False, topological_graph_density=False, topological_graph_dot=False, topological_graph_dot_cluster=False, topological_graph_dot_include_skipped=False, topological_graph_legend=False, topological_order=False, verb_extension=, verb_name='list', verb_parser=) -[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(colcon_meta) check parameters -[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(recursive) check parameters -[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(ignore) check parameters -[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(path) check parameters -[0.434s] Level 1:colcon.colcon_core.package_discovery:discover_packages(recursive) discover -[0.434s] INFO:colcon.colcon_core.package_discovery:Crawling recursively for packages in '/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes' -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['ignore', 'ignore_ament_install'] -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'ignore' -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'ignore_ament_install' -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['colcon_pkg'] -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'colcon_pkg' -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['colcon_meta'] -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'colcon_meta' -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['ros'] -[0.435s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'ros' -[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by 
extensions ['cmake', 'python'] -[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'cmake' -[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'python' -[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extensions ['python_setup_py'] -[0.450s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes) by extension 'python_setup_py' -[0.451s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes/log) by extensions ['ignore', 'ignore_ament_install'] -[0.451s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes/log) by extension 'ignore' -[0.451s] Level 1:colcon.colcon_core.package_identification:_identify(/home/musaup/Documents/Research/OSRF/ros2_porting/foxy_test/src/ar_track_alvar/ar_track_alvar/nodes/log) ignored -[0.451s] Level 1:colcon.colcon_core.package_discovery:discover_packages(recursive) using defaults From d3cbb1e596af9d1de3a19c6fca506cc9d717d7e9 Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Tue, 20 Jul 2021 13:57:09 -0500 Subject: [PATCH 03/27] successful partial build --- ar_track_alvar/src/CvTestbed.cpp | 12 +++++++----- ar_track_alvar/src/Line.cpp | 2 +- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/ar_track_alvar/src/CvTestbed.cpp b/ar_track_alvar/src/CvTestbed.cpp index 93902a8..700a2aa 100644 --- a/ar_track_alvar/src/CvTestbed.cpp +++ b/ar_track_alvar/src/CvTestbed.cpp @@ -135,22 +135,24 @@ size_t CvTestbed::SetImage(const char *title, cv::Mat *ipl, bool release_at_exit // if (images[index].release_at_exit) { // cvReleaseImage(&(images[index])); // } - images[index] = ipl; + images[index].ipl = ipl; // images[index].release_at_exit = release_at_exit; return index; } cv::Mat *CvTestbed::CreateImage(const char *title, cv::Size size, int type) { - cv::Mat *ipl=cv::Mat::zeros((size,type); + cv::Mat temp = cv::Mat::zeros(size,type); + cv::Mat *ipl = &temp; if (!ipl) return NULL; SetImage(title, ipl, true); return ipl; } cv::Mat *CvTestbed::CreateImageWithProto(const char *title, cv::Mat *proto, int depth /* =0 */, int channels /* =0 */) { - cv::Mat *ipl= cv::Mat::zeros(cv::Size(proto->cols, proto->rows), proto.type); + cv::Mat temp = cv::Mat::zeros(cv::Size(proto->cols, proto->rows), proto->type()); + cv::Mat *ipl= &temp; if (!ipl) return NULL; - ipl->origin = proto->origin; + //ipl->origin = proto->origin; SetImage(title, ipl, true); return ipl; } @@ -158,7 +160,7 @@ cv::Mat *CvTestbed::CreateImageWithProto(const char *title, cv::Mat *proto, int cv::Mat *CvTestbed::GetImage(size_t index) { if (index < 0) return NULL; if (index >= images.size()) return NULL; - return images[index]; + return images[index].ipl; } size_t CvTestbed::GetImageIndex(const char *title) { diff --git a/ar_track_alvar/src/Line.cpp b/ar_track_alvar/src/Line.cpp index 5275468..e5aa15e 100644 --- a/ar_track_alvar/src/Line.cpp +++ b/ar_track_alvar/src/Line.cpp @@ -29,7 +29,7 @@ using namespace std; 
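// A minimal sketch of the cv::Vec4f parameterization the new constructor below
// consumes (an illustration, not part of the patch; cv::fitLine packs a fitted
// line as (vx, vy, x0, y0): a unit direction plus a point on the line):
//
//   cv::Vec4f params(0.707f, 0.707f, 3.0f, 4.0f);
//   cv::Point2f centre(params[2], params[3]);   // what Line stores as c
//   cv::Point2f dir(params[0], params[1]);      // what Line stores as s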
namespace alvar { using namespace std; -Line::Line(float params[4]) +Line::Line(cv::Vec4f params) { c.x = params[2]; c.y = params[3]; From d19cad6e1b621fda8bf75c81883933c1b77baeaa Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Wed, 21 Jul 2021 09:52:44 -0500 Subject: [PATCH 04/27] fixing marker detect --- ar_track_alvar/CMakeLists.txt | 8 +- .../include/ar_track_alvar/Camera.h | 2 + .../ar_track_alvar/ConnectedComponents.h | 10 +- .../include/ar_track_alvar/Marker.h | 26 +-- .../include/ar_track_alvar/MarkerDetector.h | 7 +- .../include/ar_track_alvar/MultiMarker.h | 18 +- ar_track_alvar/include/ar_track_alvar/Util.h | 4 +- ar_track_alvar/nodes/.vscode/settings.json | 16 +- ar_track_alvar/src/ConnectedComponents.cpp | 147 +++++++++------- ar_track_alvar/src/Marker.cpp | 163 ++++++++++-------- ar_track_alvar/src/MarkerDetector.cpp | 10 +- ar_track_alvar/src/MultiMarker.cpp | 34 ++-- ar_track_alvar/src/MultiMarkerBundle.cpp | 4 +- ar_track_alvar/src/MultiMarkerInitializer.cpp | 6 +- ar_track_alvar/src/Util.cpp | 2 +- 15 files changed, 262 insertions(+), 195 deletions(-) diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 5af7012..71c39e7 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -78,8 +78,8 @@ add_library(ar_track_alvar src/Camera.cpp src/CaptureDevice.cpp src/Pose.cpp - #src/Marker.cpp - #src/MarkerDetector.cpp + src/Marker.cpp + src/MarkerDetector.cpp src/Bitset.cpp src/Rotation.cpp src/CvTestbed.cpp @@ -105,8 +105,8 @@ add_library(ar_track_alvar src/Optimization.cpp # src/MultiMarker.cpp # src/MultiMarkerBundle.cpp - # src/MultiMarkerInitializer.cpp) -) + # src/MultiMarkerInitializer.cpp + ) target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBRARIES} ${TinyXML_LIBRARIES}) ament_target_dependencies(ar_track_alvar ${dependencies}) diff --git a/ar_track_alvar/include/ar_track_alvar/Camera.h b/ar_track_alvar/include/ar_track_alvar/Camera.h index 16d818a..64f6e93 100644 --- a/ar_track_alvar/include/ar_track_alvar/Camera.h +++ b/ar_track_alvar/include/ar_track_alvar/Camera.h @@ -46,6 +46,8 @@ #include "opencv2/core/types_c.h" #include #include +#include +#include "opencv2/imgproc/types_c.h" namespace alvar { diff --git a/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h b/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h index 8228e86..8b0e579 100644 --- a/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h +++ b/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h @@ -61,11 +61,11 @@ public : /** * \brief Pointer to grayscale image that is thresholded for labeling. */ - IplImage *gray; + cv::Mat *gray; /** * \brief Pointer to binary image that is then labeled. */ - IplImage *bw; + cv::Mat *bw; /** * \brief Vector of 4-length vectors where the corners of detected blobs are stored. @@ -95,7 +95,7 @@ public : /** * \brief Labels image and filters blobs to obtain square-shaped objects from the scene. 
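 *
 * A minimal usage sketch (an illustration added here, not from the original
 * header; assumes a frame loaded into a cv::Mat):
 * \code
 * alvar::LabelingCvSeq labeling;
 * cv::Mat frame = cv::imread("frame.png");
 * labeling.LabelSquares(&frame);              // fills labeling.blob_corners
 * size_t n_candidates = labeling.blob_corners.size();
 * \endcode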
 */
-	virtual void LabelSquares(IplImage* image, bool visualize=false) = 0;
+	virtual void LabelSquares(cv::Mat* image, bool visualize=false) = 0;
 
 	bool CheckBorder(CvSeq* contour, int width, int height);
 
@@ -128,10 +128,10 @@ protected :
 
 	void SetOptions(bool _detect_pose_grayscale=false);
 
-	void LabelSquares(IplImage* image, bool visualize=false);
+	void LabelSquares(cv::Mat* image, bool visualize=false);
 
 	// TODO: Releases memory inside, cannot return CvSeq*
-	CvSeq* LabelImage(IplImage* image, int min_size, bool approx=false);
+	CvSeq* LabelImage(cv::Mat* image, int min_size, bool approx=false);
 };
 
 } // namespace alvar
 
diff --git a/ar_track_alvar/include/ar_track_alvar/Marker.h b/ar_track_alvar/include/ar_track_alvar/Marker.h
index 0313833..8240b36 100644
--- a/ar_track_alvar/include/ar_track_alvar/Marker.h
+++ b/ar_track_alvar/include/ar_track_alvar/Marker.h
@@ -52,10 +52,10 @@ namespace alvar {
 	class ALVAR_EXPORT Marker
 	{
 	protected:
-		void VisualizeMarkerPose(IplImage *image, Camera *cam, double visualize2d_points[12][2], CvScalar color=CV_RGB(255,0,0)) const;
-		virtual void VisualizeMarkerContent(IplImage *image, Camera *cam, double datatext_point[2], double content_point[2]) const;
-		virtual void VisualizeMarkerError(IplImage *image, Camera *cam, double errortext_point[2]) const;
-		bool UpdateContentBasic(std::vector<PointDouble, Eigen::aligned_allocator<PointDouble> > &_marker_corners_img, IplImage *gray, Camera *cam, int frame_no = 0);
+		void VisualizeMarkerPose(cv::Mat *image, Camera *cam, double visualize2d_points[12][2], cv::Scalar color=cv::Scalar(255,0,0)) const;
+		virtual void VisualizeMarkerContent(cv::Mat *image, Camera *cam, double datatext_point[2], double content_point[2]) const;
+		virtual void VisualizeMarkerError(cv::Mat *image, Camera *cam, double errortext_point[2]) const;
+		bool UpdateContentBasic(std::vector<PointDouble, Eigen::aligned_allocator<PointDouble> > &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no = 0);
 
 	public:
 		EIGEN_MAKE_ALIGNED_OPERATOR_NEW
@@ -67,16 +67,16 @@ namespace alvar {
 		 * Returns the marker orientation and an error value describing the pixel error
 		 * relative to the marker diameter.
 		 */
-		void CompareCorners(std::vector<PointDouble, Eigen::aligned_allocator<PointDouble> > &_marker_corners_img, int *orientation, double *error);
+		void CompareCorners(std::vector<PointDouble> &_marker_corners_img, int *orientation, double *error);
 
 		/** \brief Compares the marker corners with the previous match.
 		 */
-		void CompareContent(std::vector<PointDouble, Eigen::aligned_allocator<PointDouble> > &_marker_corners_img, IplImage *gray, Camera *cam, int *orientation) const;
+		void CompareContent(std::vector<PointDouble> &_marker_corners_img, cv::Mat *gray, Camera *cam, int *orientation) const;
 
 		/** \brief Updates the \e marker_content from the image using \e Homography
 		 */
-		virtual bool UpdateContent(std::vector<PointDouble, Eigen::aligned_allocator<PointDouble> > &_marker_corners_img, IplImage *gray, Camera *cam, int frame_no = 0);
+		virtual bool UpdateContent(std::vector<PointDouble> &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no = 0);
 
 		/** \brief Updates the markers \e pose estimation
 		 */
-		void UpdatePose(std::vector<PointDouble, Eigen::aligned_allocator<PointDouble> > &_marker_corners_img, Camera *cam, int orientation, int frame_no = 0, bool update_pose = true);
+		void UpdatePose(std::vector<PointDouble> &_marker_corners_img, Camera *cam, int orientation, int frame_no = 0, bool update_pose = true);
 
 		/** \brief Decodes the marker content. Please call \e UpdateContent before this.
 		 * This virtual method is meant to be implemented by heirs.
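		 *
		 * A minimal calling sketch (an illustration, not from the original header;
		 * assumes UpdateContent has already been called on a detected marker m):
		 * \code
		 * int orientation = 0;
		 * if (m.DecodeContent(&orientation)) {
		 *   // orientation now holds the marker's 0-3 rotation
		 * }
		 * \endcode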
*/ @@ -92,10 +92,10 @@ namespace alvar { void SaveMarkerImage(const char *filename, int save_res = 0) const; /** \brief Draw the marker filling the ROI in the given image */ - void ScaleMarkerToImage(IplImage *image) const; + void ScaleMarkerToImage(cv::Mat *image) const; /** \brief Visualize the marker */ - void Visualize(IplImage *image, Camera *cam, CvScalar color=CV_RGB(255,0,0)) const; + void Visualize(cv::Mat *image, Camera *cam, cv::Scalar color=cv::Scalar(255,0,0)) const; /** \brief Method for resizing the marker dimensions */ void SetMarkerSize(double _edge_length = 0, int _res = 0, double _margin = 0); /** \brief Get edge length (to support different size markers */ @@ -221,13 +221,13 @@ namespace alvar { class ALVAR_EXPORT MarkerData : public Marker { protected: - virtual void VisualizeMarkerContent(IplImage *image, Camera *cam, double datatext_point[2], double content_point[2]) const; + virtual void VisualizeMarkerContent(cv::Mat *image, Camera *cam, double datatext_point[2], double content_point[2]) const; void DecodeOrientation(int *error, int *total, int *orientation); int DecodeCode(int orientation, BitsetExt *bs, int *erroneous, int *total, unsigned char* content_type); void Read6bitStr(BitsetExt *bs, char *s, size_t s_max_len); void Add6bitStr(BitsetExt *bs, char *s); int UsableDataBits(int marker_res, int hamming); - bool DetectResolution(std::vector > &_marker_corners_img, IplImage *gray, Camera *cam); + bool DetectResolution(std::vector > &_marker_corners_img, cv::Mat *gray, Camera *cam); public: EIGEN_MAKE_ALIGNED_OPERATOR_NEW @@ -263,7 +263,7 @@ namespace alvar { * Compared to the basic implementation in \e Marker this will also detect the marker * resolution automatically when the marker resolution is specified to be 0. */ - virtual bool UpdateContent(std::vector > &_marker_corners_img, IplImage *gray, Camera *cam, int frame_no = 0); + virtual bool UpdateContent(std::vector > &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no = 0); /** \brief \e DecodeContent should be called after \e UpdateContent to fill \e content_type, \e decode_error and \e data */ bool DecodeContent(int *orientation); diff --git a/ar_track_alvar/include/ar_track_alvar/MarkerDetector.h b/ar_track_alvar/include/ar_track_alvar/MarkerDetector.h index c00a2ad..724f9e6 100644 --- a/ar_track_alvar/include/ar_track_alvar/MarkerDetector.h +++ b/ar_track_alvar/include/ar_track_alvar/MarkerDetector.h @@ -39,13 +39,14 @@ #include "Rotation.h" #include "Line.h" #include -using std::rotate; #include #include #include #include #include +using std::rotate; + namespace alvar { /** @@ -119,7 +120,7 @@ class ALVAR_EXPORT MarkerDetectorImpl { * - The marker points are read from inside the margins starting from top-left * and reading the bits first left-to-right one line at a time. 
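	 *
	 * A minimal usage sketch (an illustration, not from the original header;
	 * assumes a calibrated Camera and a marker edge length in the units used
	 * during calibration):
	 * \code
	 * alvar::MarkerDetector<alvar::MarkerData> detector;
	 * detector.SetMarkerSize(5.0);
	 * cv::Mat frame = cv::imread("frame.png");
	 * alvar::Camera cam;
	 * detector.Detect(&frame, &cam, false, true);
	 * size_t found = detector.markers->size();
	 * \endcode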
*/ - int Detect(IplImage *image, + int Detect(cv::Mat *image, Camera *cam, bool track=false, bool visualize=false, @@ -128,7 +129,7 @@ class ALVAR_EXPORT MarkerDetectorImpl { LabelingMethod labeling_method=CVSEQ, bool update_pose=true); - int DetectAdditional(IplImage *image, Camera *cam, bool visualize=false, double max_track_error=0.2); + int DetectAdditional(cv::Mat *image, Camera *cam, bool visualize=false, double max_track_error=0.2); }; /** diff --git a/ar_track_alvar/include/ar_track_alvar/MultiMarker.h b/ar_track_alvar/include/ar_track_alvar/MultiMarker.h index ade1d2d..c55848f 100644 --- a/ar_track_alvar/include/ar_track_alvar/MultiMarker.h +++ b/ar_track_alvar/include/ar_track_alvar/MultiMarker.h @@ -36,7 +36,7 @@ #include "Camera.h" #include "Filter.h" #include "FileFormat.h" -#include +#include #include namespace alvar { @@ -60,17 +60,17 @@ class ALVAR_EXPORT MultiMarker { // One idea is that the same 'pointcloud' could contain feature // points after marker-corner-points. This way they would be // optimized simultaneously with marker corners... - std::map pointcloud; + std::map pointcloud; std::vector marker_indices; // The marker id's to be used in marker field (first being the base) std::vector marker_status; // 0: not in point cloud, 1: in point cloud, 2: used in GetPose() - std::vector< std::vector > rel_corners; //The coords of the master marker relative to each child marker in marker_indices + std::vector< std::vector > rel_corners; //The coords of the master marker relative to each child marker in marker_indices int pointcloud_index(int marker_id, int marker_corner, bool add_if_missing=false); int get_id_index(int id, bool add_if_missing=false); - double _GetPose(MarkerIterator &begin, MarkerIterator &end, Camera* cam, Pose& pose, IplImage* image); + double _GetPose(MarkerIterator &begin, MarkerIterator &end, Camera* cam, Pose& pose, cv::Mat* image); - int _SetTrackMarkers(MarkerDetectorImpl &marker_detector, Camera* cam, Pose& pose, IplImage *image); + int _SetTrackMarkers(MarkerDetectorImpl &marker_detector, Camera* cam, Pose& pose, cv::Mat *image); int master_id; //The id of the first marker specified in the XML file @@ -106,7 +106,7 @@ class ALVAR_EXPORT MultiMarker { \param image If != 0 some visualizations are drawn. */ template - double GetPose(const std::vector >* markers, Camera* cam, Pose& pose, IplImage* image = 0) + double GetPose(const std::vector >* markers, Camera* cam, Pose& pose, cv::Mat* image = 0) { MarkerIteratorImpl begin(markers->begin()); MarkerIteratorImpl end(markers->end()); @@ -117,7 +117,7 @@ class ALVAR_EXPORT MultiMarker { /** \brief Calls GetPose to obtain camera pose. */ template - double Update(const std::vector >* markers, Camera* cam, Pose& pose, IplImage* image = 0) + double Update(const std::vector >* markers, Camera* cam, Pose& pose, cv::Mat* image = 0) { if(markers->size() < 1) return -1.0; @@ -134,7 +134,7 @@ class ALVAR_EXPORT MultiMarker { \param pose Current pose of the camera. \param corners Resulted 3D corner points are stored here. */ - void PointCloudCorners3d(double edge_length, Pose &pose, CvPoint3D64f corners[4]); + void PointCloudCorners3d(double edge_length, Pose &pose, cv::Point3f corners[4]); /** \brief Adds marker corners to 3D point cloud of multi marker. \param marker_id Id of the marker to be added. @@ -188,7 +188,7 @@ class ALVAR_EXPORT MultiMarker { * also these markers. 
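	 *
	 * A minimal usage sketch (an illustration, not from the original header;
	 * assumes a detector whose markers were filled by Detect and a calibrated
	 * Camera cam):
	 * \code
	 * std::vector<int> ids = {0, 1, 2, 3};
	 * alvar::MultiMarker multi(ids);
	 * alvar::Pose pose;
	 * double err = multi.Update(detector.markers, &cam, pose); // -1.0 if none seen
	 * \endcode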
*/ template - int SetTrackMarkers(MarkerDetector &marker_detector, Camera* cam, Pose& pose, IplImage *image=0) { + int SetTrackMarkers(MarkerDetector &marker_detector, Camera* cam, Pose& pose, cv::Mat *image=0) { return _SetTrackMarkers(marker_detector, cam, pose, image); } }; diff --git a/ar_track_alvar/include/ar_track_alvar/Util.h b/ar_track_alvar/include/ar_track_alvar/Util.h index 4bca830..9dcb0fc 100644 --- a/ar_track_alvar/include/ar_track_alvar/Util.h +++ b/ar_track_alvar/include/ar_track_alvar/Util.h @@ -103,12 +103,12 @@ struct ALVAR_EXPORT Point : public C /** * \brief The default integer point type. */ -typedef ALVAR_EXPORT Point PointInt; +typedef ALVAR_EXPORT Point PointInt; /** * \brief The default double point type. */ -typedef ALVAR_EXPORT Point PointDouble; +typedef ALVAR_EXPORT Point PointDouble; /** \brief Returns the squared distance of two points. * \param p1 First point. diff --git a/ar_track_alvar/nodes/.vscode/settings.json b/ar_track_alvar/nodes/.vscode/settings.json index 6b40311..c62e128 100644 --- a/ar_track_alvar/nodes/.vscode/settings.json +++ b/ar_track_alvar/nodes/.vscode/settings.json @@ -1,5 +1,19 @@ { "python.autoComplete.extraPaths": [ "/opt/ros/foxy/lib/python3.8/site-packages" - ] + ], + "files.associations": { + "complex": "cpp", + "*.ipp": "cpp", + "array": "cpp", + "deque": "cpp", + "list": "cpp", + "string": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "string_view": "cpp", + "initializer_list": "cpp", + "bitset": "cpp", + "utility": "cpp" + } } \ No newline at end of file diff --git a/ar_track_alvar/src/ConnectedComponents.cpp b/ar_track_alvar/src/ConnectedComponents.cpp index 1ceaeac..4d0585f 100644 --- a/ar_track_alvar/src/ConnectedComponents.cpp +++ b/ar_track_alvar/src/ConnectedComponents.cpp @@ -41,10 +41,6 @@ Labeling::Labeling() Labeling::~Labeling() { - if(gray) - cvReleaseImage(&gray); - if(bw) - cvReleaseImage(&bw); } bool Labeling::CheckBorder(CvSeq* contour, int width, int height) @@ -52,7 +48,7 @@ bool Labeling::CheckBorder(CvSeq* contour, int width, int height) bool ret = true; for(int i = 0; i < contour->total; ++i) { - CvPoint* pt = (CvPoint*)cvGetSeqElem(contour, i); + CvPoint* pt = (CvPoint*)cv::getSeqElem(contour, i); if((pt->x <= 1) || (pt->x >= width-2) || (pt->y <= 1) || (pt->y >= height-2)) ret = false; } return ret; @@ -74,39 +70,55 @@ void LabelingCvSeq::SetOptions(bool _detect_pose_grayscale) { detect_pose_grayscale = _detect_pose_grayscale; } -void LabelingCvSeq::LabelSquares(IplImage* image, bool visualize) +void LabelingCvSeq::LabelSquares(cv::Mat* image, bool visualize) { - if (gray && ((gray->width != image->width) || (gray->height != image->height))) { - cvReleaseImage(&gray); gray=NULL; - if (bw) cvReleaseImage(&bw); bw=NULL; + if (gray && ((gray->rows != image->rows) || (gray->cols != image->cols))) { + // cvReleaseImage(&gray); + gray=NULL; + // if (bw) cvReleaseImage(&bw); + bw=NULL; } if (gray == NULL) { - gray = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1); - gray->origin = image->origin; - bw = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1); - bw->origin = image->origin; + + cv::Mat temp = cv::Mat(cv::Size(image->rows, image->cols),CV_8UC1); + + gray = &temp; + + cv::Mat temp2 = cv::Mat(cv::Size(image->rows, image->cols),CV_8UC1); + //gray->origin = image->origin; + bw = &temp2; + //bw->origin = image->origin; } // Convert grayscale and threshold - if(image->nChannels == 4) - cvCvtColor(image, gray, CV_RGBA2GRAY); - else if(image->nChannels == 3) - 
cvCvtColor(image, gray, CV_RGB2GRAY); - else if(image->nChannels == 1) - cvCopy(image, gray); - else { + if(image->channels() == 4) + { + cv::cvtColor(*image, *gray, cv::COLOR_RGB2GRAY); + } + else if(image->channels() == 3) + { + cv::cvtColor(*image, *gray, cv::COLOR_RGB2GRAY); + } + else if(image->channels() == 1) + { + cv::Mat tmp = image->clone(); + gray = &tmp; + //cvCopy(image, gray); + } + else + { cerr<<"Unsupported image format"<total == 4 && CheckBorder(result, image->width, image->height) && + if( result->total == 4 && CheckBorder(result, image->rows, image->cols) && fabs(cvContourArea(result,CV_WHOLE_SEQ)) > _min_area && // TODO check limits cvCheckContourConvexity(result) ) // ttehop: Changed to 'contours' instead of 'result' { @@ -138,16 +150,16 @@ void LabelingCvSeq::LabelSquares(IplImage* image, bool visualize) { vector fitted_lines(4); blob_corners[i].resize(4); - CvSeq* sq = (CvSeq*)cvGetSeqElem(squares, i); - CvSeq* square_contour = (CvSeq*)cvGetSeqElem(square_contours, i); + CvSeq* sq = (CvSeq*)cv::getSeqElem(squares, i); + CvSeq* square_contour = (CvSeq*)cv::getSeqElem(square_contours, i); for(int j = 0; j < 4; ++j) { - CvPoint* pt0 = (CvPoint*)cvGetSeqElem(sq, j); - CvPoint* pt1 = (CvPoint*)cvGetSeqElem(sq, (j+1)%4); + CvPoint* pt0 = (CvPoint*)cv::getSeqElem(sq, j); + CvPoint* pt1 = (CvPoint*)cv::getSeqElem(sq, (j+1)%4); int k0=-1, k1=-1; for (int k = 0; ktotal; k++) { - CvPoint* pt2 = (CvPoint*)cvGetSeqElem(square_contour, k); + CvPoint* pt2 = (CvPoint*)cv::getSeqElem(square_contour, k); if ((pt0->x == pt2->x) && (pt0->y == pt2->y)) k0=k; if ((pt1->x == pt2->x) && (pt1->y == pt2->y)) k1=k; } @@ -159,7 +171,7 @@ void LabelingCvSeq::LabelSquares(IplImage* image, bool visualize) CvMat* line_data = cvCreateMat(1, len, CV_32FC2); for (int l=0; ltotal; - CvPoint* p = (CvPoint*)cvGetSeqElem(square_contour, ll); + CvPoint* p = (CvPoint*)cv::getSeqElem(square_contour, ll); CvPoint2D32f pp; pp.x = float(p->x); pp.y = float(p->y); @@ -225,10 +237,10 @@ void LabelingCvSeq::LabelSquares(IplImage* image, bool visualize) if (visualize) { for(size_t j = 0; j < 4; ++j) { PointDouble &intc = blob_corners[i][j]; - if (j == 0) cvCircle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(255, 255, 255)); - if (j == 1) cvCircle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(255, 0, 0)); - if (j == 2) cvCircle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(0, 255, 0)); - if (j == 3) cvCircle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(0, 0, 255)); + if (j == 0) cv::circle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(255, 255, 255)); + if (j == 1) cv::circle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(255, 0, 0)); + if (j == 2) cv::circle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(0, 255, 0)); + if (j == 3) cv::circle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(0, 0, 255)); } } } @@ -236,38 +248,53 @@ void LabelingCvSeq::LabelSquares(IplImage* image, bool visualize) cvClearMemStorage(storage); } -CvSeq* LabelingCvSeq::LabelImage(IplImage* image, int min_size, bool approx) +CvSeq* LabelingCvSeq::LabelImage(cv::Mat* image, int min_size, bool approx) { - assert(image->origin == 0); // Currently only top-left origin supported - if (gray && ((gray->width != image->width) || (gray->height != image->height))) { - cvReleaseImage(&gray); gray=NULL; - if (bw) cvReleaseImage(&bw); bw=NULL; + // assert(image->origin == 0); // Currently only top-left origin supported + if (gray && ((gray->rows != image->rows) || (gray->cols != image->cols))) { + // 
cvReleaseImage(&gray); + gray=NULL; + // if (bw) cvReleaseImage(&bw); + bw=NULL; } if (gray == NULL) { - gray = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1); - gray->origin = image->origin; - bw = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1); - bw->origin = image->origin; + + cv::Mat temp = cv::Mat(cv::Size(image->rows, image->cols),CV_8UC1); + + gray = &temp; + + cv::Mat temp2 = cv::Mat(cv::Size(image->rows, image->cols),CV_8UC1); + //gray->origin = image->origin; + bw = &temp2; } // Convert grayscale and threshold - if(image->nChannels == 4) - cvCvtColor(image, gray, CV_RGBA2GRAY); - else if(image->nChannels == 3) - cvCvtColor(image, gray, CV_RGB2GRAY); - else if(image->nChannels == 1) - cvCopy(image, gray); - else { + if(image->channels() == 4) + { + cv::cvtColor(*image, *gray, cv::COLOR_RGB2GRAY); + } + else if(image->channels() == 3) + { + cv::cvtColor(*image, *gray, cv::COLOR_RGB2GRAY); + } + else if(image->channels() == 1) + { + // cv::Copy(image, gray); + cv::Mat tmp = image->clone(); + gray = &tmp; + } + else + { cerr<<"Unsupported image format"<width, gray->height), IPL_DEPTH_8U, 3); - IplImage *tmp2 = cvCreateImage(cvSize(gray->width*5, gray->height*5), IPL_DEPTH_8U, 3); - cvCvtColor(gray, tmp, CV_GRAY2RGB); + cv::Mat *tmp = cvCreateImage(cvSize(gray->rows, gray->cols), IPL_DEPTH_8U, 3); + cv::Mat *tmp2 = cvCreateImage(cvSize(gray->rows*5, gray->cols*5), IPL_DEPTH_8U, 3); + cvCvtColor(gray, tmp, cv::COLOR_RGB2GRAY); cvResize(tmp, tmp2, CV_INTER_NN); #endif @@ -376,8 +403,8 @@ void FitLineGray(CvMat *line_data, float params[4], IplImage *gray) { unsigned char c1 = (unsigned char)gray->imageData[int((p->y+yy[i])*gray->widthStep+(p->x+xx[i]))]; unsigned char c2 = (unsigned char)gray->imageData[int((p->y+yy[i+1])*gray->widthStep+(p->x+xx[i+1]))]; #ifdef SHOW_DEBUG - cvCircle(tmp2, cvPoint((p->x+xx[i])*5+2,(p->y+yy[i])*5+2), 0, CV_RGB(0,0,255)); - cvCircle(tmp2, cvPoint((p->x+xx[i+1])*5+2,(p->y+yy[i+1])*5+2), 0, CV_RGB(0,0,255)); + cv::circle(tmp2, cvPoint((p->x+xx[i])*5+2,(p->y+yy[i])*5+2), 0, CV_RGB(0,0,255)); + cv::circle(tmp2, cvPoint((p->x+xx[i+1])*5+2,(p->y+yy[i+1])*5+2), 0, CV_RGB(0,0,255)); #endif double w = absdiff(c1, c2); dx += dxx[i]*w; @@ -390,7 +417,7 @@ void FitLineGray(CvMat *line_data, float params[4], IplImage *gray) { #ifdef SHOW_DEBUG cvLine(tmp2, cvPoint(p->x*5+2,p->y*5+2), cvPoint((p->x+dx)*5+2, (p->y+dy)*5+2), CV_RGB(0,255,0)); p->x += float(dx); p->y += float(dy); - cvCircle(tmp2, cvPoint(p->x*5+2,p->y*5+2), 0, CV_RGB(255,0,0)); + cv::circle(tmp2, cvPoint(p->x*5+2,p->y*5+2), 0, CV_RGB(255,0,0)); #else p->x += float(dx); p->y += float(dy); #endif diff --git a/ar_track_alvar/src/Marker.cpp b/ar_track_alvar/src/Marker.cpp index 1bf6f38..31e72e5 100644 --- a/ar_track_alvar/src/Marker.cpp +++ b/ar_track_alvar/src/Marker.cpp @@ -25,6 +25,8 @@ #include "ar_track_alvar/Marker.h" #include #include "opencv2/core/types_c.h" +#include +#include template class ALVAR_EXPORT alvar::MarkerIteratorImpl; template class ALVAR_EXPORT alvar::MarkerIteratorImpl; @@ -37,37 +39,37 @@ using namespace std; #define HEADER_SIZE 8 -void Marker::VisualizeMarkerPose(IplImage *image, Camera *cam, double visualize2d_points[12][2], CvScalar color) const { +void Marker::VisualizeMarkerPose(cv::Mat *image, Camera *cam, double visualize2d_points[12][2], cv::Scalar color) const { // Cube for (int i=0; i<4; i++) { - cvLine(image, cvPoint((int)visualize2d_points[i][0], (int)visualize2d_points[i][1]), cvPoint((int)visualize2d_points[(i+1)%4][0], 
(int)visualize2d_points[(i+1)%4][1]), color); - cvLine(image, cvPoint((int)visualize2d_points[i][0], (int)visualize2d_points[i][1]), cvPoint((int)visualize2d_points[4+i][0], (int)visualize2d_points[4+i][1]), color); - cvLine(image, cvPoint((int)visualize2d_points[4+i][0], (int)visualize2d_points[4+i][1]), cvPoint((int)visualize2d_points[4+((i+1)%4)][0], (int)visualize2d_points[4+((i+1)%4)][1]), color); + cv::line(*image, cvPoint((int)visualize2d_points[i][0], (int)visualize2d_points[i][1]), cvPoint((int)visualize2d_points[(i+1)%4][0], (int)visualize2d_points[(i+1)%4][1]), color); + cv::line(*image, cvPoint((int)visualize2d_points[i][0], (int)visualize2d_points[i][1]), cvPoint((int)visualize2d_points[4+i][0], (int)visualize2d_points[4+i][1]), color); + cv::line(*image, cvPoint((int)visualize2d_points[4+i][0], (int)visualize2d_points[4+i][1]), cvPoint((int)visualize2d_points[4+((i+1)%4)][0], (int)visualize2d_points[4+((i+1)%4)][1]), color); } // Coordinates - cvLine(image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[9][0], (int)visualize2d_points[9][1]), CV_RGB(255,0,0)); - cvLine(image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[10][0], (int)visualize2d_points[10][1]), CV_RGB(0,255,0)); - cvLine(image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[11][0], (int)visualize2d_points[11][1]), CV_RGB(0,0,255)); + cv::line(*image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[9][0], (int)visualize2d_points[9][1]), cv::Scalar(255,0,0)); + cv::line(*image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[10][0], (int)visualize2d_points[10][1]), cv::Scalar(0,255,0)); + cv::line(*image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[11][0], (int)visualize2d_points[11][1]), cv::Scalar(0,0,255)); } -void Marker::VisualizeMarkerContent(IplImage *image, Camera *cam, double datatext_point[2], double content_point[2]) const { +void Marker::VisualizeMarkerContent(cv::Mat *image, Camera *cam, double datatext_point[2], double content_point[2]) const { #ifdef VISUALIZE_MARKER_POINTS for (size_t i=0; i= 0) && (x < image->width) && - (y >= 0) && (y < image->height)) + if ((x >= 0) && (x < image->rows) && + (y >= 0) && (y < image->cols)) { if (cvGet2D(marker_content, j/3, i/3).val[0]) { - cvSet2D(image, y, x, CV_RGB(255,255,255)); + cvSet2D(image, y, x, cvScalar(255, 255, 255, 0)); } else { - cvSet2D(image, y, x, CV_RGB(0,0,0)); + cvSet2D(image, y, x, cvScalar(0, 0, 0, 0)); } } } } } -void Marker::VisualizeMarkerError(IplImage *image, Camera *cam, double errortext_point[2]) const { - CvFont font; - cvInitFont(&font, 0, 0.5, 0.5, 0); +void Marker::VisualizeMarkerError(cv::Mat *image, Camera *cam, double errortext_point[2]) const { + // CvFont font; + // cvInitFont(cv::FONT_HERSHEY_SIMPLEX, 0, 0.5, 0.5, 0); std::stringstream val; if (GetError(MARGIN_ERROR|DECODE_ERROR) > 0) { val.str(""); val< 0.01) { val.str(""); val< &_marker_corners_img, int *orientation, double *error) { - vector::iterator corners_new = _marker_corners_img.begin(); - vector::const_iterator corners_old = marker_corners_img.begin(); +void Marker::CompareCorners(std::vector &_marker_corners_img, int *orientation, double *error) { + vector::iterator corners_new = _marker_corners_img.begin(); + 
vector<PointDouble>::const_iterator corners_old = marker_corners_img.begin();
 	vector<double> errors(4);
 	for (int i=0; i<4; i++) {
 		errors[0] += PointSquaredDistance(marker_corners_img[i], _marker_corners_img[i]);
@@ -176,17 +178,17 @@ void Marker::CompareCorners(vector<PointDouble> &_marker_corners_img, int *orie
 	*error /= sqrt(max(PointSquaredDistance(marker_corners_img[0], marker_corners_img[2]), PointSquaredDistance(marker_corners_img[1], marker_corners_img[3])));
 }
 
-void Marker::CompareContent(vector<PointDouble> &_marker_corners_img, IplImage *gray, Camera *cam, int *orientation) const {
+void Marker::CompareContent(std::vector<PointDouble> &_marker_corners_img, cv::Mat *gray, Camera *cam, int *orientation) const {
 	// TODO: Note, that to use this method you need to straighten the content
 	// TODO: This method can be used with image based trackingt
 
 }
 
-bool Marker::UpdateContent(vector<PointDouble> &_marker_corners_img, IplImage *gray, Camera *cam, int frame_no /*= 0*/) {
+bool Marker::UpdateContent(std::vector<PointDouble> &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no /*= 0*/) {
 	return UpdateContentBasic(_marker_corners_img, gray, cam, frame_no);
 }
 
-bool Marker::UpdateContentBasic(vector<PointDouble> &_marker_corners_img, IplImage *gray, Camera *cam, int frame_no /*= 0*/) {
+bool Marker::UpdateContentBasic(std::vector<PointDouble> &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no /*= 0*/) {
 	vector<PointDouble> marker_corners_img_undist;
 	marker_corners_img_undist.resize(_marker_corners_img.size());
 	copy(_marker_corners_img.begin(), _marker_corners_img.end(), marker_corners_img_undist.begin());
@@ -207,8 +209,8 @@ bool Marker::UpdateContentBasic(vector<PointDouble> &_marker_corners_img, IplIm
 	double min = 255.0, max = 0.0;
 	for (int j=0; j<marker_content->height; j++) {
 		for (int i=0; i<marker_content->width; i++) {
-			x = (int)(0.5+Limit(marker_points_img[(j*marker_content->width)+i].x, 1, gray->width-2));
-			y = (int)(0.5+Limit(marker_points_img[(j*marker_content->width)+i].y, 1, gray->height-2));
+			x = (int)(0.5+Limit(marker_points_img[(j*marker_content->width)+i].x, 1, gray->rows-2));
+			y = (int)(0.5+Limit(marker_points_img[(j*marker_content->width)+i].y, 1, gray->cols-2));
 			marker_points_img[(j*marker_content->width)+i].val = (int)cvGetReal2D(gray, y, x);
@@ -244,16 +246,16 @@ bool Marker::UpdateContentBasic(vector<PointDouble> &_marker_corners_img, IplIm
 	min = max = 0;
 	// Now min and max values are averages over black and white border pixels.
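	// A standalone sketch of the cv::threshold call this function migrates to
	// below (an illustration, not part of the patch; assumes a CV_8U matrix and
	// the black/white border averages computed in this routine):
	//
	//   cv::Mat content(8, 8, CV_8U);
	//   double min_avg = 40.0, max_avg = 200.0;   // illustrative averages
	//   cv::threshold(content, content, (max_avg + min_avg) / 2.0, 255, cv::THRESH_BINARY);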
for (size_t i=0; iwidth-1)); - y = (int)(0.5+Limit(marker_margin_w_img[i].y, 0, gray->height-1)); + x = (int)(0.5+Limit(marker_margin_w_img[i].x, 0, gray->rows-1)); + y = (int)(0.5+Limit(marker_margin_w_img[i].y, 0, gray->cols-1)); marker_margin_w_img[i].val = (int)cvGetReal2D(gray, y, x); max += marker_margin_w_img[i].val; //if(marker_margin_w_img[i].val > max) max = marker_margin_w_img[i].val; //if(marker_margin_w_img[i].val < min) min = marker_margin_w_img[i].val; } for (size_t i=0; iwidth-1)); - y = (int)(0.5+Limit(marker_margin_b_img[i].y, 0, gray->height-1)); + x = (int)(0.5+Limit(marker_margin_b_img[i].x, 0, gray->rows-1)); + y = (int)(0.5+Limit(marker_margin_b_img[i].y, 0, gray->cols-1)); marker_margin_b_img[i].val = (int)cvGetReal2D(gray, y, x); min += marker_margin_b_img[i].val; //if(marker_margin_b_img[i].val > max) max = marker_margin_b_img[i].val; @@ -264,7 +266,13 @@ bool Marker::UpdateContentBasic(vector &_marker_corners_img, IplIm min /= marker_margin_b_img.size(); // Threshold the marker content - cvThreshold(marker_content, marker_content, (max+min)/2.0, 255, CV_THRESH_BINARY); + + cv::Mat temp = cv::cvarrToMat(marker_content); + + cv::threshold(temp, temp, (max+min)/2.0, 255, cv::THRESH_BINARY); + + CvMat temp2 = cvMat(temp); + marker_content = &temp2; // Count erroneous margin nodes int erroneous = 0; @@ -304,7 +312,7 @@ bool Marker::UpdateContentBasic(vector &_marker_corners_img, IplIm #endif return true; } -void Marker::UpdatePose(vector &_marker_corners_img, Camera *cam, int orientation, int frame_no /* =0 */, bool update_pose /* =true */) { +void Marker::UpdatePose(std::vector &_marker_corners_img, Camera *cam, int orientation, int frame_no /* =0 */, bool update_pose /* =true */) { marker_corners_img.resize(_marker_corners_img.size()); copy(_marker_corners_img.begin(), _marker_corners_img.end(), marker_corners_img.begin()); @@ -328,30 +336,45 @@ void Marker::SaveMarkerImage(const char *filename, int save_res) const { } scale = double(save_res)/double(res+margin+margin); - IplImage *img = cvCreateImage(cvSize(save_res, save_res), IPL_DEPTH_8U, 1); - IplImage *img_content = cvCreateImage(cvSize(int(res*scale+0.5), int(res*scale+0.5)), IPL_DEPTH_8U, 1); + + cv::Mat temp = cv::Mat(cv::Size(save_res, save_res),CV_8UC1); + cv::Mat *img = &temp; + + cv::Mat temp2 = cv::Mat(cv::Size(int(res*scale+0.5), int(res*scale+0.5)),CV_8UC1); + cv::Mat *img_content = &temp2; + + // cvCreateImage(cvSize(int(res*scale+0.5), int(res*scale+0.5)), IPL_DEPTH_8U, 1); + + cvZero(img); CvMat submat; cvGetSubRect(img, &submat, cvRect(int(margin*scale), int(margin*scale), int(res*scale), int(res*scale))); - cvResize(marker_content, img_content, CV_INTER_NN); + cv::resize(cv::cvarrToMat(marker_content), *img_content,img_content->size(), cv::INTER_NEAREST); cvCopy(img_content, &submat); - cvSaveImage(filename, img); - cvReleaseImage(&img_content); - cvReleaseImage(&img); + cv::imwrite(filename, *img); + // cvReleaseImage(&img_content); + // cvReleaseImage(&img); } -void Marker::ScaleMarkerToImage(IplImage *image) const { +void Marker::ScaleMarkerToImage(cv::Mat *image) const { const int multiplier=96; - IplImage *img = cvCreateImage(cvSize(int(multiplier*(res+margin+margin)+0.5), int(multiplier*(res+margin+margin)+0.5)), IPL_DEPTH_8U, 1); - IplImage *img_content = cvCreateImage(cvSize(int(multiplier*res+0.5), int(multiplier*res+0.5)), IPL_DEPTH_8U, 1); + cv::Mat temp = cv::Mat(cv::Size(int(multiplier*(res+margin+margin)+0.5), int(multiplier*(res+margin+margin)+0.5)),CV_8UC1); + cv::Mat *img = 
+
+	cv::Mat temp2 = cv::Mat(cv::Size(int(multiplier*res+0.5), int(multiplier*res+0.5)),CV_8UC1);
+	cv::Mat *img_content = &temp2;
+
+
-	cvZero(img);
-	CvMat submat;
-	cvGetSubRect(img, &submat, cvRect(int(multiplier*margin+0.5), int(multiplier*margin+0.5), int(multiplier*res+0.5), int(multiplier*res+0.5)));
+	img->setTo(cv::Scalar(0));
+	cv::Mat submat = (*img)(cv::Rect(int(multiplier*margin+0.5), int(multiplier*margin+0.5), int(multiplier*res+0.5), int(multiplier*res+0.5)));
-	cvResize(marker_content, img_content, CV_INTER_NN);
+	cv::resize(cv::cvarrToMat(marker_content), *img_content, img_content->size(), cv::INTER_NEAREST);
-	cvCopy(img_content, &submat);
-	cvResize(img, image, CV_INTER_NN);
-	cvReleaseImage(&img_content);
-	cvReleaseImage(&img);
+	img_content->copyTo(submat);
+	cv::resize(*img, *image, image->size(), cv::INTER_NEAREST);
+	// cvReleaseImage(&img_content);
+	// cvReleaseImage(&img);
 }
 
 void Marker::SetMarkerSize(double _edge_length, int _res, double _margin) {
@@ -668,7 +691,7 @@ void MarkerData::DecodeOrientation(int *error, int *total, int *orientation) {
 	//*orientation = 0; // ttehop
 }
 
-bool MarkerData::DetectResolution(vector<PointDouble> &_marker_corners_img, IplImage *gray, Camera *cam) {
+bool MarkerData::DetectResolution(std::vector<PointDouble> &_marker_corners_img, cv::Mat *gray, Camera *cam) {
 	vector<PointDouble> marker_corners_img_undist;
 	marker_corners_img_undist.resize(_marker_corners_img.size());
 	copy(_marker_corners_img.begin(), _marker_corners_img.end(), marker_corners_img_undist.begin());
@@ -702,24 +725,26 @@ bool MarkerData::DetectResolution(vector<PointDouble> &_marker_corners_img, Ipl
 	pt2.x = int(line_points_img[0].x);
 	pt2.y = int(line_points_img[0].y);
 	if ((pt2.x < 0) || (pt2.y < 0) ||
-	    (pt2.x >= gray->width) || (pt2.y >= gray->height))
+	    (pt2.x >= gray->cols) || (pt2.y >= gray->rows))  // cols == width, rows == height
 	{
 		return false;
 	}
 	bool white=true;
 	for (int i=0; i<4; i++) {
-		CvLineIterator iterator;
+		// CvLineIterator iterator;
 		pt1.x = int(line_points_img[i+1].x);
 		pt1.y = int(line_points_img[i+1].y);
 		if ((pt1.x < 0) || (pt1.y < 0) ||
-		    (pt1.x >= gray->width) || (pt1.y >= gray->height))
+		    (pt1.x >= gray->cols) || (pt1.y >= gray->rows))
 		{
 			return false;
 		}
-		int count = cvInitLineIterator(gray, pt1, pt2, &iterator, 8, 0);
+		cv::LineIterator iterator = cv::LineIterator(*gray, pt1, pt2, 8, 0);
+		int count = iterator.count;
 		std::vector<uchar> vals;
-		for(int ii = 0; ii < count; ii++ ){
-			CV_NEXT_LINE_POINT(iterator);
+		for(int ii = 0; ii < count; ii++, ++iterator){
+			//CV_NEXT_LINE_POINT(iterator);
+			vals.push_back(*(iterator.ptr));
 		}
 		uchar vmin = *(std::min_element(vals.begin(), vals.end()));
@@ -762,7 +787,7 @@ bool MarkerData::DetectResolution(vector<PointDouble> &_marker_corners_img, Ipl
 	return true;
 }
 
-bool MarkerData::UpdateContent(vector<PointDouble> &_marker_corners_img, IplImage *gray, Camera *cam, int frame_no /*= 0*/) {
+bool MarkerData::UpdateContent(vector<PointDouble> &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no /*= 0*/) {
 	if (res == 0) {
 		if (!DetectResolution(_marker_corners_img, gray, cam)) return false;
 	}
diff --git a/ar_track_alvar/src/MarkerDetector.cpp b/ar_track_alvar/src/MarkerDetector.cpp
index 258ba32..d7f02db 100644
--- a/ar_track_alvar/src/MarkerDetector.cpp
+++ b/ar_track_alvar/src/MarkerDetector.cpp
@@ -75,7 +75,7 @@ namespace alvar {
 		detect_pose_grayscale = _detect_pose_grayscale;
 	}
 
-	int MarkerDetectorImpl::Detect(IplImage *image,
+	int MarkerDetectorImpl::Detect(cv::Mat *image,
 			Camera *cam,
 			bool track,
 			bool visualize,
@@ -84,7 +84,7 @@
 			LabelingMethod labeling_method,
 			bool update_pose)
 	{
-		assert(image->origin == 0); // Currently only top-left origin supported
+		//assert(image->origin == 0); // Currently only top-left origin supported
 		double error=-1;
 
 		// Swap marker tables
@@ -104,7 +104,7 @@
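// A minimal sketch (not part of the patch) of the cv::LineIterator pattern that the
// DetectResolution hunk above adopts in place of cvInitLineIterator/CV_NEXT_LINE_POINT.
// sampleLine is a hypothetical helper name; it assumes a CV_8UC1 image.
#include <opencv2/imgproc.hpp>
#include <vector>

static std::vector<uchar> sampleLine(const cv::Mat &gray, cv::Point a, cv::Point b)
{
    cv::LineIterator it(gray, a, b, 8);       // 8-connected, like cvInitLineIterator(..., 8, 0)
    std::vector<uchar> vals;
    vals.reserve(it.count);
    for (int i = 0; i < it.count; ++i, ++it)  // ++it replaces CV_NEXT_LINE_POINT
        vals.push_back(**it);                 // operator* yields a uchar* for CV_8UC1
    return vals;
}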
labeling->SetCamera(cam); labeling->LabelSquares(image, visualize); vector >& blob_corners = labeling->blob_corners; - IplImage* gray = labeling->gray; + cv::Mat* gray = labeling->gray; int orientation; @@ -165,9 +165,9 @@ namespace alvar { return (int) _markers_size(); } - int MarkerDetectorImpl::DetectAdditional(IplImage *image, Camera *cam, bool visualize, double max_track_error) + int MarkerDetectorImpl::DetectAdditional(cv::Mat *image, Camera *cam, bool visualize, double max_track_error) { - assert(image->origin == 0); // Currently only top-left origin supported + //assert(image->origin == 0); // Currently only top-left origin supported if(!labeling) return -1; double error=-1; int orientation; diff --git a/ar_track_alvar/src/MultiMarker.cpp b/ar_track_alvar/src/MultiMarker.cpp index 9aa389f..26009dc 100644 --- a/ar_track_alvar/src/MultiMarker.cpp +++ b/ar_track_alvar/src/MultiMarker.cpp @@ -72,7 +72,7 @@ bool MultiMarker::SaveXML(const char* fname) { for(int j = 0; j < 4; ++j) { TiXmlElement *xml_corner = new TiXmlElement("corner"); xml_marker->LinkEndChild(xml_corner); - CvPoint3D64f X = pointcloud[pointcloud_index(marker_indices[i], j)]; + cv::Point3f X = pointcloud[pointcloud_index(marker_indices[i], j)]; xml_corner->SetDoubleAttribute("x", X.x); xml_corner->SetDoubleAttribute("y", X.y); xml_corner->SetDoubleAttribute("z", X.z); @@ -103,7 +103,7 @@ bool MultiMarker::SaveText(const char* fname) { for(size_t i = 0; i < n_markers; ++i) for(size_t j = 0; j < 4; ++j) { - CvPoint3D64f X = pointcloud[pointcloud_index(marker_indices[i], j)]; + cv::Point3f X = pointcloud[pointcloud_index(marker_indices[i], j)]; file_op<QueryDoubleAttribute("x", &X.x) != TIXML_SUCCESS) return false; if (xml_corner->QueryDoubleAttribute("y", &X.y) != TIXML_SUCCESS) return false; if (xml_corner->QueryDoubleAttribute("z", &X.z) != TIXML_SUCCESS) return false; @@ -190,7 +190,7 @@ bool MultiMarker::LoadText(const char* fname) { for(size_t i = 0; i < n_markers; ++i) for(size_t j = 0; j < 4; ++j) { - CvPoint3D64f X; + cv::Point3f X; file_op>>X.x; file_op>>X.y; file_op>>X.z; @@ -226,7 +226,7 @@ void MultiMarker::PointCloudReset() { pointcloud.clear(); } -void MultiMarker::PointCloudCorners3d(double edge_length, Pose &pose, CvPoint3D64f corners[4]) { +void MultiMarker::PointCloudCorners3d(double edge_length, Pose &pose, cv::Point3f corners[4]) { // Transformation from origin to current marker CvMat *m3 = cvCreateMat(4,4,CV_64F); cvSetIdentity(m3); pose.GetMatrix(m3); @@ -262,7 +262,7 @@ void MultiMarker::PointCloudCorners3d(double edge_length, Pose &pose, CvPoint3D6 } void MultiMarker::PointCloudAdd(int marker_id, double edge_length, Pose &pose) { - CvPoint3D64f corners[4]; + cv::Point3f corners[4]; PointCloudCorners3d(edge_length, pose, corners); for(size_t j = 0; j < 4; ++j) { pointcloud[pointcloud_index(marker_id, j, true)] = corners[j]; @@ -281,7 +281,7 @@ void MultiMarker::PointCloudCopy(const MultiMarker *m) { void MultiMarker::PointCloudGet(int marker_id, int point, double &x, double &y, double &z) { - CvPoint3D64f p3d = pointcloud[pointcloud_index(marker_id, point)]; + cv::Point3f p3d = pointcloud[pointcloud_index(marker_id, point)]; x = p3d.x; y = p3d.y; z = p3d.z; @@ -293,8 +293,8 @@ bool MultiMarker::IsValidMarker(int marker_id) { } -double MultiMarker::_GetPose(MarkerIterator &begin, MarkerIterator &end, Camera* cam, Pose& pose, IplImage* image) { - vector world_points; +double MultiMarker::_GetPose(MarkerIterator &begin, MarkerIterator &end, Camera* cam, Pose& pose, cv::Mat* image) { + vector 
<cv::Point3f> world_points;
 	vector<PointDouble> image_points;
 
 	// Reset the marker_status to 1 for all markers in point_cloud
@@ -314,10 +314,10 @@ double MultiMarker::_GetPose(MarkerIterator &begin, MarkerIterator &end, Camera*
 		if (marker_status[index] > 0) {
 			for(size_t j = 0; j < marker->marker_corners.size(); ++j) {
-				CvPoint3D64f Xnew = pointcloud[pointcloud_index(id, (int)j)];
+				cv::Point3f Xnew = pointcloud[pointcloud_index(id, (int)j)];
 				world_points.push_back(Xnew);
 				image_points.push_back(marker->marker_corners_img.at(j));
-				if (image) cvCircle(image, cvPoint(int(marker->marker_corners_img[j].x), int(marker->marker_corners_img[j].y)), 3, CV_RGB(0,255,0));
+				if (image) cv::circle(*image, cvPoint(int(marker->marker_corners_img[j].x), int(marker->marker_corners_img[j].y)), 3, cv::Scalar(0,255,0));
 			}
 			marker_status[index] = 2; // Used for tracking
 		}
@@ -336,14 +336,14 @@ double MultiMarker::_GetPose(MarkerIterator &begin, MarkerIterator &end, Camera*
 }
 
-int MultiMarker::_SetTrackMarkers(MarkerDetectorImpl &marker_detector, Camera* cam, Pose& pose, IplImage *image) {
+int MultiMarker::_SetTrackMarkers(MarkerDetectorImpl &marker_detector, Camera* cam, Pose& pose, cv::Mat *image) {
 	int count=0;
 	marker_detector.TrackMarkersReset();
 	for(size_t i = 0; i < marker_indices.size(); ++i) {
 		int id = marker_indices[i];
 		// If the marker wasn't tracked lets add it to be trackable
 		if (marker_status[i] == 1) {
-			vector<CvPoint3D64f> pw(4);
+			vector<cv::Point3f> pw(4);
 			pw[0] = pointcloud[pointcloud_index(id, 0)];
 			pw[1] = pointcloud[pointcloud_index(id, 1)];
 			pw[2] = pointcloud[pointcloud_index(id, 2)];
@@ -360,10 +360,10 @@ int MultiMarker::_SetTrackMarkers(MarkerDetectorImpl &marker_detector, Camera* c
 			p[3].x = pi[3].x;
 			p[3].y = pi[3].y;
 			if (image) {
-				cvLine(image, cvPoint(int(p[0].x), int(p[0].y)), cvPoint(int(p[1].x), int(p[1].y)), CV_RGB(255,0,0));
-				cvLine(image, cvPoint(int(p[1].x), int(p[1].y)), cvPoint(int(p[2].x), int(p[2].y)), CV_RGB(255,0,0));
-				cvLine(image, cvPoint(int(p[2].x), int(p[2].y)), cvPoint(int(p[3].x), int(p[3].y)), CV_RGB(255,0,0));
-				cvLine(image, cvPoint(int(p[3].x), int(p[3].y)), cvPoint(int(p[0].x), int(p[0].y)), CV_RGB(255,0,0));
+				// CV_RGB(255,0,0) is red; in BGR channel order that is cv::Scalar(0,0,255)
+				cv::line(*image, cvPoint(int(p[0].x), int(p[0].y)), cvPoint(int(p[1].x), int(p[1].y)), cv::Scalar(0,0,255));
+				cv::line(*image, cvPoint(int(p[1].x), int(p[1].y)), cvPoint(int(p[2].x), int(p[2].y)), cv::Scalar(0,0,255));
+				cv::line(*image, cvPoint(int(p[2].x), int(p[2].y)), cvPoint(int(p[3].x), int(p[3].y)), cv::Scalar(0,0,255));
+				cv::line(*image, cvPoint(int(p[3].x), int(p[3].y)), cvPoint(int(p[0].x), int(p[0].y)), cv::Scalar(0,0,255));
 			}
 			marker_detector.TrackMarkerAdd(id, p);
 			count++;
diff --git a/ar_track_alvar/src/MultiMarkerBundle.cpp b/ar_track_alvar/src/MultiMarkerBundle.cpp
index 680be3b..fd4e273 100644
--- a/ar_track_alvar/src/MultiMarkerBundle.cpp
+++ b/ar_track_alvar/src/MultiMarkerBundle.cpp
@@ -88,9 +88,7 @@ void Est(CvMat* state, CvMat* estimation, void *param)
 		double proj[2]={0};
 		CvMat mat_proj = cvMat(1, 1, CV_64FC2, proj);
-		cvProjectPoints2(&mat_object_points, &mat_rotation_vector,
-			&mat_translation_vector, &(camera->calib_K),
-			&(camera->calib_D), &mat_proj);
+		cv::projectPoints(cv::cvarrToMat(&mat_object_points), cv::cvarrToMat(&mat_rotation_vector),cv::cvarrToMat(&mat_translation_vector), &(camera->calib_K),&(camera->calib_D), &mat_proj);
 
 		index = i*n_points*2 + j*2;
 		estimation->data.db[index+0] = proj[0];
diff --git a/ar_track_alvar/src/MultiMarkerInitializer.cpp b/ar_track_alvar/src/MultiMarkerInitializer.cpp
index a87b294..561ef23 100644
---
a/ar_track_alvar/src/MultiMarkerInitializer.cpp +++ b/ar_track_alvar/src/MultiMarkerInitializer.cpp @@ -67,7 +67,7 @@ void MultiMarkerInitializer::MeasurementsAdd(MarkerIterator &begin, MarkerIterat if (index == 0 && marker_status[index] == 0) { Pose pose; - CvPoint3D64f corners[4]; + cv::Point3f corners[4]; PointCloudCorners3d(marker->GetMarkerEdgeLength(), pose, corners); for(size_t j = 0; j < 4; ++j) { int p_index = pointcloud_index(id, j); @@ -135,10 +135,10 @@ bool MultiMarkerInitializer::updateMarkerPoses(vector &points, cv::RotatedRect& ellipse_bo CvMat* vector = cvCreateMat(1, int(points.size()), CV_64FC2); for(size_t i = 0; i < points.size(); ++i) { - CV_MAT_ELEM(*vector, CvPoint2D64f, 0, i) = (CvPoint2D64f)points[i]; + CV_MAT_ELEM(*vector, cv::Point2f, 0, i) = (cv::Point2f)points[i]; } ellipse_box = cv::fitEllipse(cv::cvarrToMat(&vector)); //cvReleaseMat(&vector); From 1eccb627774624c55a92b6ab262c22ac9a2a90e5 Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Wed, 21 Jul 2021 16:57:20 -0500 Subject: [PATCH 05/27] Port towards nodes --- ar_track_alvar/CMakeLists.txt | 40 +- .../ar_track_alvar/ConnectedComponents.h | 2 +- ar_track_alvar/nodes/FindMarkerBundles.cpp | 12 +- .../nodes/FindMarkerBundlesNoKinect.cpp | 8 +- ar_track_alvar/nodes/IndividualMarkers.cpp | 10 +- .../nodes/IndividualMarkersNoKinect.cpp | 10 +- ar_track_alvar/nodes/TrainMarkerBundle.cpp | 8 +- ar_track_alvar/src/ConnectedComponents.cpp | 385 ++++++++++-------- ar_track_alvar/src/MultiMarker.cpp | 8 +- ar_track_alvar/src/MultiMarkerBundle.cpp | 2 +- ar_track_alvar/src/SampleMarkerCreator.cpp | 6 +- 11 files changed, 267 insertions(+), 224 deletions(-) diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 71c39e7..d8ea5f1 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -55,6 +55,7 @@ set(ALVAR_TARGETS ar_track_alvar individualMarkers individualMarkersNoKinect tra set(dependencies OpenCV tf2_ros + tf2 pcl_ros pcl_conversions std_msgs @@ -66,6 +67,7 @@ set(dependencies geometry_msgs cv_bridge sensor_msgs + ar_track_alvar_msgs ) include_directories(include @@ -91,7 +93,7 @@ add_library(ar_track_alvar src/Threads_unix.cpp src/Mutex.cpp src/Mutex_unix.cpp - #src/ConnectedComponents.cpp + src/ConnectedComponents.cpp src/Line.cpp src/Plugin.cpp src/Plugin_unix.cpp @@ -103,9 +105,9 @@ add_library(ar_track_alvar src/Kalman.cpp src/kinect_filtering.cpp src/Optimization.cpp - # src/MultiMarker.cpp - # src/MultiMarkerBundle.cpp - # src/MultiMarkerInitializer.cpp + src/MultiMarker.cpp + src/MultiMarkerBundle.cpp + src/MultiMarkerInitializer.cpp ) target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBRARIES} ${TinyXML_LIBRARIES}) @@ -119,26 +121,26 @@ add_library(medianFilter src/medianFilter.cpp) target_link_libraries(medianFilter ar_track_alvar) ament_target_dependencies(medianFilter ${dependencies}) -# add_executable(individualMarkers nodes/IndividualMarkers.cpp) -# target_link_libraries(individualMarkers ar_track_alvar kinect_filtering) -# ament_target_dependencies(individualMarkers ${dependencies}) +add_executable(individualMarkers nodes/IndividualMarkers.cpp) +target_link_libraries(individualMarkers ar_track_alvar kinect_filtering) +ament_target_dependencies(individualMarkers ${dependencies}) -# add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) -# target_link_libraries(individualMarkersNoKinect ar_track_alvar) -# ament_target_dependencies(individualMarkersNoKinect ${dependencies}) +add_executable(individualMarkersNoKinect 
nodes/IndividualMarkersNoKinect.cpp) +target_link_libraries(individualMarkersNoKinect ar_track_alvar) +ament_target_dependencies(individualMarkersNoKinect ${dependencies}) -# add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) -# target_link_libraries(trainMarkerBundle ar_track_alvar) -# ament_target_dependencies(trainMarkerBundle ${dependencies}) +add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) +target_link_libraries(trainMarkerBundle ar_track_alvar) +ament_target_dependencies(trainMarkerBundle ${dependencies}) -# add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) -# target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) -# ament_target_dependencies(findMarkerBundles ${dependencies}) +add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) +target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) +ament_target_dependencies(findMarkerBundles ${dependencies}) -# add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) -# target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) -# ament_target_dependencies(findMarkerBundlesNoKinect ${dependencies}) +add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) +target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) +ament_target_dependencies(findMarkerBundlesNoKinect ${dependencies}) # add_executable(createMarker src/SampleMarkerCreator.cpp) # target_link_libraries(createMarker ar_track_alvar) diff --git a/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h b/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h index 8b0e579..35d9317 100644 --- a/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h +++ b/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h @@ -131,7 +131,7 @@ protected : void LabelSquares(cv::Mat* image, bool visualize=false); // TODO: Releases memory inside, cannot return CvSeq* - CvSeq* LabelImage(cv::Mat* image, int min_size, bool approx=false); + std::vector LabelImage(cv::Mat* image, int min_size, bool approx=false); }; } // namespace alvar diff --git a/ar_track_alvar/nodes/FindMarkerBundles.cpp b/ar_track_alvar/nodes/FindMarkerBundles.cpp index 6397d9e..3e701af 100644 --- a/ar_track_alvar/nodes/FindMarkerBundles.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundles.cpp @@ -39,11 +39,11 @@ #include "ar_track_alvar/MultiMarkerInitializer.h" #include "ar_track_alvar/Shared.h" #include -#include -#include +#include +#include #include "tf2_ros/buffer.h" #include "tf2_ros/transform_listener.h" -#include +#include #include #include @@ -52,18 +52,18 @@ #include #include -#include +#include #include "rclcpp/rclcpp.hpp" #include #include #include #include #include -#include +// #include #include #include -#include +// #include #include #include #include diff --git a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp index bc0ca94..31d0689 100644 --- a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp @@ -41,12 +41,12 @@ #include "ar_track_alvar/MultiMarkerInitializer.h" #include "ar_track_alvar/Shared.h" #include -#include -#include +#include +#include #include "tf2_ros/buffer.h" #include "tf2_ros/transform_listener.h" -#include -#include +#include +#include using namespace alvar; using namespace std; diff --git a/ar_track_alvar/nodes/IndividualMarkers.cpp b/ar_track_alvar/nodes/IndividualMarkers.cpp index 80d1940..6255300 100644 --- 
a/ar_track_alvar/nodes/IndividualMarkers.cpp +++ b/ar_track_alvar/nodes/IndividualMarkers.cpp @@ -39,14 +39,14 @@ #include "ar_track_alvar/MarkerDetector.h" #include "ar_track_alvar/Shared.h" #include -#include -#include +#include +#include #include "tf2_ros/buffer.h" #include "tf2_ros/transform_listener.h" -#include -#include +#include +#include #include -#include +// #include #include #include diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index 6b0e2d9..56f46df 100644 --- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -40,13 +40,13 @@ #include "ar_track_alvar/MarkerDetector.h" #include "ar_track_alvar/Shared.h" #include -#include -#include +#include +#include #include "tf2_ros/buffer.h" #include "tf2_ros/transform_listener.h" -#include -#include -#include +#include +#include +// #include #include using namespace alvar; diff --git a/ar_track_alvar/nodes/TrainMarkerBundle.cpp b/ar_track_alvar/nodes/TrainMarkerBundle.cpp index 5b8fe3a..302fbc5 100644 --- a/ar_track_alvar/nodes/TrainMarkerBundle.cpp +++ b/ar_track_alvar/nodes/TrainMarkerBundle.cpp @@ -41,12 +41,12 @@ #include "ar_track_alvar/MultiMarkerInitializer.h" #include "ar_track_alvar/Shared.h" #include -#include -#include +#include +#include #include "tf2_ros/buffer.h" #include "tf2_ros/transform_listener.h" -#include -#include +#include +#include #include using namespace alvar; diff --git a/ar_track_alvar/src/ConnectedComponents.cpp b/ar_track_alvar/src/ConnectedComponents.cpp index 4d0585f..f0004ad 100644 --- a/ar_track_alvar/src/ConnectedComponents.cpp +++ b/ar_track_alvar/src/ConnectedComponents.cpp @@ -114,141 +114,167 @@ void LabelingCvSeq::LabelSquares(cv::Mat* image, bool visualize) cv::adaptiveThreshold(*gray, *bw, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, thresh_param1, thresh_param2); //cvThreshold(gray, bw, 127, 255, CV_THRESH_BINARY_INV); - CvSeq* contours; - CvSeq* squares = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); - CvSeq* square_contours = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); - cv::findContours(*bw, storage, &contours, sizeof(CvContour), - CV_RETR_LIST, CV_CHAIN_APPROX_NONE, cvPoint(0,0)); - while(contours) + vector > contours; + //vector hierarchy; + cv::findContours(*bw, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE ); + vector > contours_poly( contours.size() ); + vector boundRect(contours.size()); + + + // CvSeq* contours; + // CvSeq* squares = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); + // CvSeq* square_contours = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); + + + for( size_t i = 0; i < contours.size(); i++ ) { - if(contours->total < _min_edge) - { - contours = contours->h_next; - continue; - } - - CvSeq* result = cv::approxPolyDP(cv::cvarrToMat(contours), sizeof(CvContour), storage, - CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.035, 0 ); // TODO: Parameters? 
- - if( result->total == 4 && CheckBorder(result, image->rows, image->cols) && - fabs(cvContourArea(result,CV_WHOLE_SEQ)) > _min_area && // TODO check limits - cvCheckContourConvexity(result) ) // ttehop: Changed to 'contours' instead of 'result' - { - cvSeqPush(squares, result); - cvSeqPush(square_contours, contours); - } - contours = contours->h_next; + cv::approxPolyDP( contours[i], contours_poly[i], 3, true ); + boundRect[i] = cv::boundingRect( contours_poly[i]); + // minEnclosingCircle( contours_poly[i], centers[i], radius[i] ); } + cv::RNG rng(12345); + for( size_t i = 0; i< contours.size(); i++ ) + { + cv::Scalar color = cv::Scalar( rng.uniform(0, 256), rng.uniform(0,256), rng.uniform(0,256) ); + cv::drawContours(*image, contours_poly, (int)i, color ); + cv::rectangle(*image, boundRect[i].tl(), boundRect[i].br(), color, 2 ); + } - _n_blobs = squares->total; - blob_corners.resize(_n_blobs); - // For every detected 4-corner blob - for(int i = 0; i < _n_blobs; ++i) - { - vector fitted_lines(4); - blob_corners[i].resize(4); - CvSeq* sq = (CvSeq*)cv::getSeqElem(squares, i); - CvSeq* square_contour = (CvSeq*)cv::getSeqElem(square_contours, i); + + + // while(contours) + // { + // if(contours->total < _min_edge) + // { + // contours = contours->h_next; + // continue; + // } + + + // approxPolyDP( contours[i], contours_poly[i], 3, true ); + // CvSeq* result = cv::approxPolyDP(cv::cvarrToMat(contours), sizeof(CvContour), storage, + // CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.035, 0 ); // TODO: Parameters? + + // if( result->total == 4 && CheckBorder(result, image->rows, image->cols) && + // fabs(cvContourArea(result,CV_WHOLE_SEQ)) > _min_area && // TODO check limits + // cvCheckContourConvexity(result) ) // ttehop: Changed to 'contours' instead of 'result' + // { + // cvSeqPush(squares, result); + // cvSeqPush(square_contours, contours); + // } + // contours = contours->h_next; + // } + + // _n_blobs = squares->total; + // blob_corners.resize(_n_blobs); + + // // For every detected 4-corner blob + // for(int i = 0; i < _n_blobs; ++i) + // { + // vector fitted_lines(4); + // blob_corners[i].resize(4); + // CvSeq* sq = (CvSeq*)cv::getSeqElem(squares, i); + // CvSeq* square_contour = (CvSeq*)cv::getSeqElem(square_contours, i); - for(int j = 0; j < 4; ++j) - { - CvPoint* pt0 = (CvPoint*)cv::getSeqElem(sq, j); - CvPoint* pt1 = (CvPoint*)cv::getSeqElem(sq, (j+1)%4); - int k0=-1, k1=-1; - for (int k = 0; ktotal; k++) { - CvPoint* pt2 = (CvPoint*)cv::getSeqElem(square_contour, k); - if ((pt0->x == pt2->x) && (pt0->y == pt2->y)) k0=k; - if ((pt1->x == pt2->x) && (pt1->y == pt2->y)) k1=k; - } - int len; - if (k1 >= k0) len = k1-k0-1; // neither k0 nor k1 are included - else len = square_contour->total-k0+k1-1; - if (len == 0) len = 1; - - CvMat* line_data = cvCreateMat(1, len, CV_32FC2); - for (int l=0; ltotal; - CvPoint* p = (CvPoint*)cv::getSeqElem(square_contour, ll); - CvPoint2D32f pp; - pp.x = float(p->x); - pp.y = float(p->y); - - // Undistort - if(cam) - cam->Undistort(pp); - - CV_MAT_ELEM(*line_data, CvPoint2D32f, 0, l) = pp; - } - - // Fit edge and put to vector of edges - float params[4] = {0}; - - // TODO: The detect_pose_grayscale is still under work... 
- /* - if (detect_pose_grayscale && - (pt0->x > 3) && (pt0->y > 3) && - (pt0->x < (gray->width-4)) && - (pt0->y < (gray->height-4))) - { - // ttehop: Grayscale experiment - FitLineGray(line_data, params, gray); - } - */ - cvFitLine(line_data, CV_DIST_L2, 0, 0.01, 0.01, params); - - //cvFitLine(line_data, CV_DIST_L2, 0, 0.01, 0.01, params); - ////cvFitLine(line_data, CV_DIST_HUBER, 0, 0.01, 0.01, params); - Line line = Line(params); - if(visualize) DrawLine(image, line); - fitted_lines[j] = line; - - cvReleaseMat(&line_data); - } - - // Calculated four intersection points - for(size_t j = 0; j < 4; ++j) - { - PointDouble intc = Intersection(fitted_lines[j],fitted_lines[(j+1)%4]); - - // TODO: Instead, test OpenCV find corner in sub-pix... - //CvPoint2D32f pt = cvPoint2D32f(intc.x, intc.y); - //cvFindCornerSubPix(gray, &pt, - // 1, cvSize(3,3), cvSize(-1,-1), - // cvTermCriteria( - // CV_TERMCRIT_ITER+CV_TERMCRIT_EPS,10,1e-4)); + // for(int j = 0; j < 4; ++j) + // { + // CvPoint* pt0 = (CvPoint*)cv::getSeqElem(sq, j); + // CvPoint* pt1 = (CvPoint*)cv::getSeqElem(sq, (j+1)%4); + // int k0=-1, k1=-1; + // for (int k = 0; ktotal; k++) { + // CvPoint* pt2 = (CvPoint*)cv::getSeqElem(square_contour, k); + // if ((pt0->x == pt2->x) && (pt0->y == pt2->y)) k0=k; + // if ((pt1->x == pt2->x) && (pt1->y == pt2->y)) k1=k; + // } + // int len; + // if (k1 >= k0) len = k1-k0-1; // neither k0 nor k1 are included + // else len = square_contour->total-k0+k1-1; + // if (len == 0) len = 1; + + // CvMat* line_data = cvCreateMat(1, len, CV_32FC2); + // for (int l=0; ltotal; + // CvPoint* p = (CvPoint*)cv::getSeqElem(square_contour, ll); + // CvPoint2D32f pp; + // pp.x = float(p->x); + // pp.y = float(p->y); + + // // Undistort + // if(cam) + // cam->Undistort(pp); + + // CV_MAT_ELEM(*line_data, CvPoint2D32f, 0, l) = pp; + // } + + // // Fit edge and put to vector of edges + // cv::Vec4f params = {0}; + + // // TODO: The detect_pose_grayscale is still under work... + // /* + // if (detect_pose_grayscale && + // (pt0->x > 3) && (pt0->y > 3) && + // (pt0->x < (gray->width-4)) && + // (pt0->y < (gray->height-4))) + // { + // // ttehop: Grayscale experiment + // FitLineGray(line_data, params, gray); + // } + // */ + // cvFitLine(line_data, CV_DIST_L2, 0, 0.01, 0.01, params); + + // //cvFitLine(line_data, CV_DIST_L2, 0, 0.01, 0.01, params); + // ////cvFitLine(line_data, CV_DIST_HUBER, 0, 0.01, 0.01, params); + // Line line = Line(params); + // if(visualize) DrawLine(image, line); + // fitted_lines[j] = line; + + // cvReleaseMat(&line_data); + // } + + // // Calculated four intersection points + // for(size_t j = 0; j < 4; ++j) + // { + // PointDouble intc = Intersection(fitted_lines[j],fitted_lines[(j+1)%4]); + + // // TODO: Instead, test OpenCV find corner in sub-pix... + // //CvPoint2D32f pt = cvPoint2D32f(intc.x, intc.y); + // //cvFindCornerSubPix(gray, &pt, + // // 1, cvSize(3,3), cvSize(-1,-1), + // // cvTermCriteria( + // // CV_TERMCRIT_ITER+CV_TERMCRIT_EPS,10,1e-4)); - // TODO: Now there is a wierd systematic 0.5 pixel error that is fixed here... - //intc.x += 0.5; - //intc.y += 0.5; - - if(cam) cam->Distort(intc); - - // TODO: Should we make this always counter-clockwise or clockwise? 
- /* - if (image->origin && j == 1) blob_corners[i][3] = intc; - else if (image->origin && j == 3) blob_corners[i][1] = intc; - else blob_corners[i][j] = intc; - */ - blob_corners[i][j] = intc; - } - if (visualize) { - for(size_t j = 0; j < 4; ++j) { - PointDouble &intc = blob_corners[i][j]; - if (j == 0) cv::circle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(255, 255, 255)); - if (j == 1) cv::circle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(255, 0, 0)); - if (j == 2) cv::circle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(0, 255, 0)); - if (j == 3) cv::circle(image, cvPoint(int(intc.x), int(intc.y)), 5, CV_RGB(0, 0, 255)); - } - } - } - - cvClearMemStorage(storage); + // // TODO: Now there is a wierd systematic 0.5 pixel error that is fixed here... + // //intc.x += 0.5; + // //intc.y += 0.5; + + // if(cam) cam->Distort(intc); + + // // TODO: Should we make this always counter-clockwise or clockwise? + // /* + // if (image->origin && j == 1) blob_corners[i][3] = intc; + // else if (image->origin && j == 3) blob_corners[i][1] = intc; + // else blob_corners[i][j] = intc; + // */ + // blob_corners[i][j] = intc; + // } + // if (visualize) { + // for(size_t j = 0; j < 4; ++j) { + // PointDouble &intc = blob_corners[i][j]; + // if (j == 0) cv::circle(*image, cvPoint(int(intc.x), int(intc.y)), 5, cv::Scalar(255, 255, 255)); + // if (j == 1) cv::circle(*image, cvPoint(int(intc.x), int(intc.y)), 5, cv::Scalar(255, 0, 0)); + // if (j == 2) cv::circle(*image, cvPoint(int(intc.x), int(intc.y)), 5, cv::Scalar(0, 255, 0)); + // if (j == 3) cv::circle(*image, cvPoint(int(intc.x), int(intc.y)), 5, cv::Scalar(0, 0, 255)); + // } + // } + //} + + // cvClearMemStorage(storage); } -CvSeq* LabelingCvSeq::LabelImage(cv::Mat* image, int min_size, bool approx) +vector LabelingCvSeq::LabelImage(cv::Mat* image, int min_size, bool approx) { // assert(image->origin == 0); // Currently only top-left origin supported if (gray && ((gray->rows != image->rows) || (gray->cols != image->cols))) { @@ -290,43 +316,56 @@ CvSeq* LabelingCvSeq::LabelImage(cv::Mat* image, int min_size, bool approx) cv::adaptiveThreshold(*gray, *bw, 255, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, thresh_param1, thresh_param2); - CvSeq* contours; - CvSeq* edges = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); - CvSeq* squares = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); + vector > contours; + //vector hierarchy; + cv::findContours(*bw, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE ); + vector > contours_poly( contours.size() ); + vector boundRect(contours.size()); - cv::findContours(bw, storage, &contours, sizeof(CvContour), - CV_RETR_LIST, CV_CHAIN_APPROX_NONE, cvPoint(0,0)); - //cvFindContours(bw, storage, &contours, sizeof(CvContour), - // CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0)); + for( size_t i = 0; i < contours.size(); i++ ) + { + cv::approxPolyDP( contours[i], contours_poly[i], 3, true ); + boundRect[i] = cv::boundingRect( contours_poly[i]); + // minEnclosingCircle( contours_poly[i], centers[i], radius[i] ); + } + // CvSeq* contours; + // CvSeq* edges = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); + // CvSeq* squares = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); - while(contours) - { - if(contours->total < min_size) - { - contours = contours->h_next; - continue; - } + // cv::findContours(bw, storage, &contours, sizeof(CvContour), + // CV_RETR_LIST, CV_CHAIN_APPROX_NONE, cvPoint(0,0)); + // //cvFindContours(bw, storage, &contours, sizeof(CvContour), + // 
// CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0)); + + + // while(contours) + // { + // if(contours->total < min_size) + // { + // contours = contours->h_next; + // continue; + // } - if(approx) - { - CvSeq* result = cv::approxPolyDP(contours, sizeof(CvContour), storage, - CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.02, 0 ); // TODO: Parameters? - - if(cvCheckContourConvexity(result)) - { - cvSeqPush(squares, result); - } - } - else - cvSeqPush(squares, contours); + // if(approx) + // { + // CvSeq* result = cv::approxPolyDP(contours, sizeof(CvContour), storage, + // CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.02, 0 ); // TODO: Parameters? + + // if(cvCheckContourConvexity(result)) + // { + // cvSeqPush(squares, result); + // } + // } + // else + // cvSeqPush(squares, contours); - contours = contours->h_next; - } + // contours = contours->h_next; + // } - cvClearMemStorage(storage); + // cvClearMemStorage(storage); - return squares; + return boundRect; } inline int round(double x) { @@ -394,34 +433,36 @@ void FitLineGray(CvMat *line_data, float params[4], cv::Mat *gray) { dyy[i] = (yy[i]+yy[i+1])/2; } - // Adjust the points - for (int l=0; lcols; l++) { - CvPoint2D32f *p = (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, l, sizeof(CvPoint2D32f)); + CvMat temp1 = cvMat(*gray); + CvMat* temp2 = &temp1; + // // Adjust the points + // for (int l=0; lcols; l++) { + // CvPoint2D32f *p = (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, l, sizeof(CvPoint2D32f)); - double dx=0, dy=0, ww=0; - for (int i=0; iimageData[int((p->y+yy[i])*gray->widthStep+(p->x+xx[i]))]; - unsigned char c2 = (unsigned char)gray->imageData[int((p->y+yy[i+1])*gray->widthStep+(p->x+xx[i+1]))]; + // double dx=0, dy=0, ww=0; + // for (int i=0; iimageData[int((p->y+yy[i])*temp2->widthStep+(p->x+xx[i]))]; + // unsigned char c2 = (unsigned char)temp2->imageData[int((p->y+yy[i+1])*temp2->widthStep+(p->x+xx[i+1]))]; #ifdef SHOW_DEBUG cv::circle(tmp2, cvPoint((p->x+xx[i])*5+2,(p->y+yy[i])*5+2), 0, CV_RGB(0,0,255)); cv::circle(tmp2, cvPoint((p->x+xx[i+1])*5+2,(p->y+yy[i+1])*5+2), 0, CV_RGB(0,0,255)); #endif - double w = absdiff(c1, c2); - dx += dxx[i]*w; - dy += dyy[i]*w; - ww += w; - } - if (ww > 0) { - dx /= ww; dy /= ww; - } + // double w = absdiff(c1, c2); + // dx += dxx[i]*w; + // dy += dyy[i]*w; + // ww += w; + // } + // if (ww > 0) { + // dx /= ww; dy /= ww; + // } #ifdef SHOW_DEBUG cvLine(tmp2, cvPoint(p->x*5+2,p->y*5+2), cvPoint((p->x+dx)*5+2, (p->y+dy)*5+2), CV_RGB(0,255,0)); p->x += float(dx); p->y += float(dy); cv::circle(tmp2, cvPoint(p->x*5+2,p->y*5+2), 0, CV_RGB(255,0,0)); #else - p->x += float(dx); p->y += float(dy); + // p->x += float(dx); p->y += float(dy); #endif - } + #ifdef SHOW_DEBUG cvNamedWindow("tmp"); diff --git a/ar_track_alvar/src/MultiMarker.cpp b/ar_track_alvar/src/MultiMarker.cpp index 26009dc..7158faf 100644 --- a/ar_track_alvar/src/MultiMarker.cpp +++ b/ar_track_alvar/src/MultiMarker.cpp @@ -152,9 +152,9 @@ bool MultiMarker::LoadXML(const char* fname) { if (!xml_corner) return false; cv::Point3f X; - if (xml_corner->QueryDoubleAttribute("x", &X.x) != TIXML_SUCCESS) return false; - if (xml_corner->QueryDoubleAttribute("y", &X.y) != TIXML_SUCCESS) return false; - if (xml_corner->QueryDoubleAttribute("z", &X.z) != TIXML_SUCCESS) return false; + if (xml_corner->QueryDoubleAttribute("x", (double *)&X.x) != TIXML_SUCCESS) return false; + if (xml_corner->QueryDoubleAttribute("y", (double *)&X.y) != TIXML_SUCCESS) return false; + if (xml_corner->QueryDoubleAttribute("z", (double *)&X.z) != 
TIXML_SUCCESS) return false;
 
 			pointcloud[pointcloud_index(marker_indices[i], j)] = X;
 			xml_corner = (TiXmlElement*)xml_corner->NextSibling("corner");
@@ -348,7 +348,7 @@ int MultiMarker::_SetTrackMarkers(MarkerDetectorImpl &marker_detector, Camera* c
 			pw[1] = pointcloud[pointcloud_index(id, 1)];
 			pw[2] = pointcloud[pointcloud_index(id, 2)];
 			pw[3] = pointcloud[pointcloud_index(id, 3)];
-			vector<CvPoint2D64f> pi(4);
+			vector<cv::Point2f> pi(4);
 			cam->ProjectPoints(pw, &pose, pi);
 			PointDouble p[4]; // TODO: This type copying is so silly!!!
 			p[0].x = pi[0].x;
diff --git a/ar_track_alvar/src/MultiMarkerBundle.cpp b/ar_track_alvar/src/MultiMarkerBundle.cpp
index fd4e273..7e9103b 100644
--- a/ar_track_alvar/src/MultiMarkerBundle.cpp
+++ b/ar_track_alvar/src/MultiMarkerBundle.cpp
@@ -88,7 +88,7 @@ void Est(CvMat* state, CvMat* estimation, void *param)
 		double proj[2]={0};
 		CvMat mat_proj = cvMat(1, 1, CV_64FC2, proj);
-		cv::projectPoints(cv::cvarrToMat(&mat_object_points), cv::cvarrToMat(&mat_rotation_vector),cv::cvarrToMat(&mat_translation_vector), &(camera->calib_K),&(camera->calib_D), &mat_proj);
+		cv::projectPoints(cv::cvarrToMat(&mat_object_points), cv::cvarrToMat(&mat_rotation_vector),cv::cvarrToMat(&mat_translation_vector), cv::cvarrToMat(&(camera->calib_K)),cv::cvarrToMat(&(camera->calib_D)), cv::cvarrToMat(&mat_proj));
 
 		index = i*n_points*2 + j*2;
 		estimation->data.db[index+0] = proj[0];
diff --git a/ar_track_alvar/src/SampleMarkerCreator.cpp b/ar_track_alvar/src/SampleMarkerCreator.cpp
index c0a8f04..f2b4fb8 100644
--- a/ar_track_alvar/src/SampleMarkerCreator.cpp
+++ b/ar_track_alvar/src/SampleMarkerCreator.cpp
@@ -1,10 +1,10 @@
 #include "ar_track_alvar/MultiMarker.h"
-#include "highgui.h"
+#include <opencv2/highgui.hpp>
 
 using namespace std;
 using namespace alvar;
 
 struct State {
-	IplImage *img;
+	cv::Mat img;
 	stringstream filename;
 	double minx, miny, maxx, maxy; // top-left and bottom-right in pixel units
 	MultiMarker multi_marker;
@@ -44,7 +44,7 @@ struct State {
 		MarkerData md(marker_side_len, content_res, margin_res);
 		int side_len = int(marker_side_len*units+0.5);
-		if (img == 0) {
+		if (img.empty()) {
-			img = cvCreateImage(cvSize(side_len, side_len), IPL_DEPTH_8U, 1);
+			img = cv::Mat(cv::Size(side_len, side_len),CV_8UC1);
 			filename.str("");
 			filename<<"MarkerData";
 			minx = (posx*units) - (marker_side_len*units/2.0);

From 57b3a8ea43e167738466654631cc7791803c9639 Mon Sep 17 00:00:00 2001
From: Patrick Musau
Date: Fri, 23 Jul 2021 12:49:14 -0500
Subject: [PATCH 06/27] Port XML parsing from TinyXML to tinyxml2
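This commit moves the XML I/O in FileFormatUtils, Camera and MultiMarker from TinyXML (TiXmlElement, TIXML_SUCCESS) to tinyxml2. A minimal sketch of the read pattern involved, assuming a corner element with x/y/z attributes as written by SaveXML; readCorner is a hypothetical helper, not part of the patch. Reading into a local double and assigning the float members of cv::Point3f also avoids casts like (double *)&X.x seen in the earlier hunk, which are undefined behavior:

    #include <tinyxml2.h>
    #include <opencv2/core.hpp>

    // Hypothetical helper, for illustration only.
    static bool readCorner(const tinyxml2::XMLElement *xml_corner, cv::Point3f &X)
    {
        if (!xml_corner) return false;
        double x, y, z;
        // tinyxml2 returns XML_SUCCESS where TinyXML returned TIXML_SUCCESS.
        if (xml_corner->QueryDoubleAttribute("x", &x) != tinyxml2::XML_SUCCESS) return false;
        if (xml_corner->QueryDoubleAttribute("y", &y) != tinyxml2::XML_SUCCESS) return false;
        if (xml_corner->QueryDoubleAttribute("z", &z) != tinyxml2::XML_SUCCESS) return false;
        X = cv::Point3f(float(x), float(y), float(z));
        return true;
    }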
--- ar_track_alvar/CMakeLists.txt | 27 +- .../include/ar_track_alvar/FileFormatUtils.h | 10 +- ar_track_alvar/nodes/.vscode/settings.json | 57 +- ar_track_alvar/nodes/IndividualMarkers.cpp | 959 +++++++++--------- .../nodes/IndividualMarkersNoKinect.cpp | 2 +- ar_track_alvar/package.xml | 1 + ar_track_alvar/src/Camera.cpp | 19 +- ar_track_alvar/src/FileFormatUtils.cpp | 31 +- ar_track_alvar/src/MultiMarker.cpp | 50 +- ar_track_alvar/src/Util.cpp | 52 +- 10 files changed, 672 insertions(+), 536 deletions(-) diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index d8ea5f1..ee00103 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -33,6 +33,7 @@ find_package(pcl_conversions REQUIRED) find_package(ar_track_alvar_msgs) find_package(std_msgs) find_package(sensor_msgs) +find_package(tf2_geometry_msgs REQUIRED) find_package(geometry_msgs) find_package(visualization_msgs) find_package(PCL REQUIRED QUIET COMPONENTS common io) @@ -59,12 +60,14 @@ set(dependencies pcl_ros pcl_conversions std_msgs + TinyXML2 image_transport perception_pcl visualization_msgs rclcpp resource_retriever geometry_msgs + tf2_geometry_msgs cv_bridge sensor_msgs ar_track_alvar_msgs @@ -125,22 +128,22 @@ add_executable(individualMarkers nodes/IndividualMarkers.cpp) target_link_libraries(individualMarkers ar_track_alvar kinect_filtering) ament_target_dependencies(individualMarkers ${dependencies}) -add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) -target_link_libraries(individualMarkersNoKinect ar_track_alvar) -ament_target_dependencies(individualMarkersNoKinect ${dependencies}) +# add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) +# target_link_libraries(individualMarkersNoKinect ar_track_alvar) +# ament_target_dependencies(individualMarkersNoKinect ${dependencies}) -add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) -target_link_libraries(trainMarkerBundle ar_track_alvar) -ament_target_dependencies(trainMarkerBundle ${dependencies}) +# add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) +# target_link_libraries(trainMarkerBundle ar_track_alvar) +# ament_target_dependencies(trainMarkerBundle ${dependencies}) -add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) -target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) -ament_target_dependencies(findMarkerBundles ${dependencies}) +# add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) +# target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) +# ament_target_dependencies(findMarkerBundles ${dependencies}) -add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) -target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) -ament_target_dependencies(findMarkerBundlesNoKinect ${dependencies}) +# add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) +# target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) +# ament_target_dependencies(findMarkerBundlesNoKinect ${dependencies}) # add_executable(createMarker src/SampleMarkerCreator.cpp) # target_link_libraries(createMarker ar_track_alvar) diff --git a/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h b/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h index c5e242d..95c9b52 100644 --- a/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h +++ b/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h @@ -35,7 +35,7 @@ #include #include #include -#include 
"tinyxml.h" +#include namespace alvar { @@ -47,7 +47,7 @@ namespace alvar { * \brief Reads matrix type, rows and cols from XML element. * \return true if XML element appears to be valid; otherwise false. */ - static bool decodeXMLMatrix(const TiXmlElement *xml_matrix, int &type, int &rows, int &cols); + static bool decodeXMLMatrix(const tinyxml2::XMLElement *xml_matrix, int &type, int &rows, int &cols); public: @@ -55,7 +55,7 @@ namespace alvar { * \param xml_matrix alvar:matrix element. * \return CvMat that has the correct size for \e parseXMLMatrix. */ - static CvMat* allocateXMLMatrix(const TiXmlElement *xml_matrix); + static CvMat* allocateXMLMatrix(const tinyxml2::XMLElement *xml_matrix); /** \brief Reads contents of alvar:matrix into CvMat. * @@ -68,7 +68,7 @@ namespace alvar { * the xml_matrix. * \return true if matrix was successfully parsed; otherwise false. */ - static bool parseXMLMatrix(const TiXmlElement *xml_matrix, CvMat *matrix); + static bool parseXMLMatrix(const tinyxml2::XMLElement *xml_matrix, CvMat *matrix); /** \brief Allocates new XML element and populates it with a CvMat data. * @@ -78,7 +78,7 @@ namespace alvar { * \param matrix Data that is written into the returned XML element. * \return Newly allocated TiXmlElement. */ - static TiXmlElement* createXMLMatrix(const char* element_name, const CvMat *matrix); + static tinyxml2::XMLElement* createXMLMatrix(const char* element_name, const CvMat *matrix); }; } diff --git a/ar_track_alvar/nodes/.vscode/settings.json b/ar_track_alvar/nodes/.vscode/settings.json index c62e128..411983b 100644 --- a/ar_track_alvar/nodes/.vscode/settings.json +++ b/ar_track_alvar/nodes/.vscode/settings.json @@ -14,6 +14,61 @@ "string_view": "cpp", "initializer_list": "cpp", "bitset": "cpp", - "utility": "cpp" + "utility": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "csignal": "cpp", + "cstdarg": "cpp", + "cstddef": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cstring": "cpp", + "ctime": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "atomic": "cpp", + "strstream": "cpp", + "bit": "cpp", + "*.tcc": "cpp", + "chrono": "cpp", + "cinttypes": "cpp", + "codecvt": "cpp", + "condition_variable": "cpp", + "cstdint": "cpp", + "map": "cpp", + "set": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "functional": "cpp", + "iterator": "cpp", + "memory": "cpp", + "memory_resource": "cpp", + "numeric": "cpp", + "optional": "cpp", + "random": "cpp", + "ratio": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "future": "cpp", + "iomanip": "cpp", + "iosfwd": "cpp", + "iostream": "cpp", + "istream": "cpp", + "limits": "cpp", + "mutex": "cpp", + "new": "cpp", + "ostream": "cpp", + "shared_mutex": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "thread": "cpp", + "cfenv": "cpp", + "typeindex": "cpp", + "typeinfo": "cpp", + "variant": "cpp" } } \ No newline at end of file diff --git a/ar_track_alvar/nodes/IndividualMarkers.cpp b/ar_track_alvar/nodes/IndividualMarkers.cpp index 6255300..9a16579 100644 --- a/ar_track_alvar/nodes/IndividualMarkers.cpp +++ b/ar_track_alvar/nodes/IndividualMarkers.cpp @@ -42,59 +42,163 @@ #include #include #include "tf2_ros/buffer.h" +#include "tf2_ros/create_timer_ros.h" #include "tf2_ros/transform_listener.h" #include +#include +#include #include #include -// #include -#include +#include "rclcpp/rclcpp.hpp" #include +#include + +using namespace alvar; +using namespace std; +using boost::make_shared; namespace gm=geometry_msgs; 
namespace ata=ar_track_alvar; - typedef pcl::PointXYZRGB ARPoint; typedef pcl::PointCloud ARCloud; -using namespace alvar; -using namespace std; -using boost::make_shared; -bool init=true; -Camera *cam; -cv_bridge::CvImagePtr cv_ptr_; -image_transport::Subscriber cam_sub_; -ros::Subscriber cloud_sub_; -ros::Publisher arMarkerPub_; -ros::Publisher rvizMarkerPub_; -ros::Publisher rvizMarkerPub2_; -ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -visualization_msgs::msg::Marker rvizMarker_; -tf2_ros::TransformListener *tf_listener; -tf::TransformBroadcaster *tf_broadcaster; -MarkerDetector marker_detector; - -bool enableSwitched = false; -bool enabled = true; -bool output_frame_from_msg; -double max_frequency; -double marker_size; -double max_new_marker_error; -double max_track_error; -std::string cam_image_topic; -std::string cam_info_topic; -std::string output_frame; -int marker_resolution = 5; // default marker resolution -int marker_margin = 2; // default marker margin - - -//Debugging utility function +class IndividualMarkers : public rclcpp::Node +{ + private: + bool init=true; + Camera *cam; + cv_bridge::CvImagePtr cv_ptr_; + + rclcpp::Subscription::SharedPtr cloud_sub_; + rclcpp::Publisher::SharedPtr arMarkerPub_; + rclcpp::Publisher::SharedPtr rvizMarkerPub_; + rclcpp::Publisher::SharedPtr rvizMarkerPub2_; + + tf2_ros::TransformBroadcaster tf_broadcaster_; + tf2_ros::TransformListener tf_listener_; + tf2_ros::Buffer tf2_; + + ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; + visualization_msgs::msg::Marker rvizMarker_; + + MarkerDetector marker_detector; + bool enableSwitched = false; + bool enabled = true; + bool output_frame_from_msg; + double max_frequency; + double marker_size; + double max_new_marker_error; + double max_track_error; + std::string cam_image_topic; + std::string cam_info_topic; + std::string output_frame; + int marker_resolution = 5; // default marker resolution + int marker_margin = 2; // default marker margin + + + + + public: + IndividualMarkers(int argc, char* argv[]):Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this) + { + if(argc > 1) + { + RCLCPP_WARN(this->get_logger(), "Command line arguments are deprecated. Consider using ROS parameters and remappings."); + if(argc < 7) + { + std::cout << std::endl; + cout << "Not enough arguments provided." 
<< endl; + cout << "Usage: ./individualMarkers " + << " [ ]"; + std::cout << std::endl; + exit(0); + } + // Get params from command line + marker_size = atof(argv[1]); + max_new_marker_error = atof(argv[2]); + max_track_error = atof(argv[3]); + cam_image_topic = argv[4]; + cam_info_topic = argv[5]; + output_frame = argv[6]; + + if (argc > 7) + { + max_frequency = atof(argv[7]); + this->declare_parameter("max_frequency", max_frequency); + } + if (argc > 8) + { + marker_resolution = atoi(argv[8]); + } + if (argc > 9) + { + marker_margin = atoi(argv[9]); + } + } + else + { + this->declare_parameter("marker_size", 10.0); + this->declare_parameter("max_new_marker_error", 0.08); + this->declare_parameter("max_track_error", 0.2); + this->declare_parameter("max_frequency", 8.0); + this->declare_parameter("marker_resolution", 5); + this->declare_parameter("marker_margin", 2); + this->declare_parameter("output_frame_from_msg", false); + + this->get_parameter("marker_size", marker_size); + this->get_parameter("max_new_marker_error", max_new_marker_error); + this->get_parameter("max_track_error", max_track_error); + this->get_parameter("max_frequency", max_frequency); + this->get_parameter("marker_resolution", marker_resolution); + this->get_parameter("marker_margin", marker_margin); + this->get_parameter("output_frame_from_msg", output_frame_from_msg); + + if (!output_frame_from_msg && !this->get_parameter("output_frame", output_frame)) + { + RCLCPP_ERROR(this->get_logger(), "Param 'output_frame' has to be set if the output frame is not " + "derived from the point cloud message."); + exit(0); + } + + // Camera input topics. Use remapping to map to your camera topics. + cam_image_topic = "camera_image"; + cam_info_topic = "camera_info"; + } + + marker_detector.SetMarkerSize(marker_size, marker_resolution, marker_margin); + cam = new Camera(cam_info_topic); + + + arMarkerPub_ = this->create_publisher ("ar_pose_marker", 0); + rvizMarkerPub_ = this->create_publisher ("visualization_marker", 0); + rvizMarkerPub2_ = this->create_publisher ("ARmarker_points", 0); + + + //Give tf a chance to catch up before the camera callback starts asking for transforms + // It will also reconfigure parameters for the first time, setting the default values + //TODO: come back to this, there's probably a better way to do this + rclcpp::Rate loop_rate(100); + loop_rate.sleep(); + + if (enabled == true) + { + // This always happens, as enable is true by default + RCLCPP_INFO(this->get_logger(), "Subscribing to image topic"); + + cloud_sub_ = this->create_subscription(cam_image_topic, 1, + std::bind(&IndividualMarkers::getPointCloudCallback, this, std::placeholders::_1)); + } + } + + + //Debugging utility function void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double rad) { visualization_msgs::msg::Marker rvizMarker; rvizMarker.header.frame_id = frame; - rvizMarker.header.stamp = rclcpp::Time::now(); + rvizMarker.header.stamp = this->get_clock()->now(); rvizMarker.id = id; rvizMarker.ns = "3dpts"; @@ -124,7 +228,7 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra rvizMarker.color.a = 1.0; } - gm::Point p; + gm::msg::Point p; for(int i=0; ipoints.size(); i++){ p.x = cloud->points[i].x; p.y = cloud->points[i].y; @@ -133,482 +237,431 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra } rvizMarker.lifetime = rclcpp::Duration (1.0); - rvizMarkerPub2_.publish (rvizMarker); + rvizMarkerPub2_->publish (rvizMarker); } -void drawArrow(gm::Point 
start, tf::Matrix3x3 mat, string frame, int color, int id) -{ - visualization_msgs::msg::Marker rvizMarker; - - rvizMarker.header.frame_id = frame; - rvizMarker.header.stamp = rclcpp::Time::now(); - rvizMarker.id = id; - rvizMarker.ns = "arrow"; - - rvizMarker.scale.x = 0.01; - rvizMarker.scale.y = 0.01; - rvizMarker.scale.z = 0.1; - - rvizMarker.type = visualization_msgs::msg::Marker::ARROW; - rvizMarker.action = visualization_msgs::msg::Marker::ADD; + void drawArrow(gm::msg::Point start, tf2::Matrix3x3 mat, string frame, int color, int id) + { + visualization_msgs::msg::Marker rvizMarker; + + rvizMarker.header.frame_id = frame; + rvizMarker.header.stamp = this->get_clock()->now(); + rvizMarker.id = id; + rvizMarker.ns = "arrow"; + + rvizMarker.scale.x = 0.01; + rvizMarker.scale.y = 0.01; + rvizMarker.scale.z = 0.1; + + rvizMarker.type = visualization_msgs::msg::Marker::ARROW; + rvizMarker.action = visualization_msgs::msg::Marker::ADD; + + for(int i=0; i<3; i++){ + rvizMarker.points.clear(); + rvizMarker.points.push_back(start); + gm::msg::Point end; + end.x = start.x + mat[0][i]; + end.y = start.y + mat[1][i]; + end.z = start.z + mat[2][i]; + rvizMarker.points.push_back(end); + rvizMarker.id += 10*i; + rvizMarker.lifetime = rclcpp::Duration (1.0); + + if(color==1){ + rvizMarker.color.r = 1.0f; + rvizMarker.color.g = 0.0f; + rvizMarker.color.b = 0.0f; + rvizMarker.color.a = 1.0; + } + if(color==2){ + rvizMarker.color.r = 0.0f; + rvizMarker.color.g = 1.0f; + rvizMarker.color.b = 0.0f; + rvizMarker.color.a = 1.0; + } + if(color==3){ + rvizMarker.color.r = 0.0f; + rvizMarker.color.g = 0.0f; + rvizMarker.color.b = 1.0f; + rvizMarker.color.a = 1.0; + } + color += 1; - for(int i=0; i<3; i++){ - rvizMarker.points.clear(); - rvizMarker.points.push_back(start); - gm::Point end; - end.x = start.x + mat[0][i]; - end.y = start.y + mat[1][i]; - end.z = start.z + mat[2][i]; - rvizMarker.points.push_back(end); - rvizMarker.id += 10*i; - rvizMarker.lifetime = rclcpp::Duration (1.0); - - if(color==1){ - rvizMarker.color.r = 1.0f; - rvizMarker.color.g = 0.0f; - rvizMarker.color.b = 0.0f; - rvizMarker.color.a = 1.0; + rvizMarkerPub2_->publish (rvizMarker); } - if(color==2){ - rvizMarker.color.r = 0.0f; - rvizMarker.color.g = 1.0f; - rvizMarker.color.b = 0.0f; - rvizMarker.color.a = 1.0; - } - if(color==3){ - rvizMarker.color.r = 0.0f; - rvizMarker.color.g = 0.0f; - rvizMarker.color.b = 1.0f; - rvizMarker.color.a = 1.0; - } - color += 1; - - rvizMarkerPub2_.publish (rvizMarker); } -} - - -int PlaneFitPoseImprovement(int id, const ARCloud &corners_3D, ARCloud::Ptr selected_points, const ARCloud &cloud, Pose &p){ - ata::PlaneFitResult res = ata::fitPlane(selected_points); - gm::PoseStamped pose; - pose.header.stamp = pcl_conversions::fromPCL(cloud.header).stamp; - pose.header.frame_id = cloud.header.frame_id; - pose.pose.position = ata::centroid(*res.inliers); - - draw3dPoints(selected_points, cloud.header.frame_id, 1, id, 0.005); - - //Get 2 points that point forward in marker x direction - int i1,i2; - if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) || - isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z)) + int PlaneFitPoseImprovement(int id, const ARCloud &corners_3D, ARCloud::Ptr selected_points, const ARCloud &cloud, Pose &p) + { + ata::PlaneFitResult res = ata::fitPlane(selected_points); + gm::msg::PoseStamped pose; + pose.header.stamp = pcl_conversions::fromPCL(cloud.header).stamp; + pose.header.frame_id = cloud.header.frame_id; + pose.pose.position 
= ata::centroid(*res.inliers); + + draw3dPoints(selected_points, cloud.header.frame_id, 1, id, 0.005); + + //Get 2 points that point forward in marker x direction + int i1,i2; + if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) || + isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z)) { + if(isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z) || - isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z)) - { - return -1; - } - else{ - i1 = 1; - i2 = 2; + isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z)) + { + return -1; + } + else + { + i1 = 1; + i2 = 2; } } - else{ - i1 = 0; - i2 = 3; - } - - //Get 2 points the point forward in marker y direction - int i3,i4; - if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) || - isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z)) + else { - if(isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z) || - isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z)) - { - return -1; - } - else{ - i3 = 2; - i4 = 3; - } + i1 = 0; + i2 = 3; } - else{ - i3 = 1; - i4 = 0; - } - ARCloud::Ptr orient_points(new ARCloud()); - orient_points->points.push_back(corners_3D[i1]); - draw3dPoints(orient_points, cloud.header.frame_id, 3, id+1000, 0.008); + //Get 2 points the point forward in marker y direction + int i3,i4; + if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) || + isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z)) + { + if(isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z) || + isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z)) + { + return -1; + } + else + { + i3 = 2; + i4 = 3; + } + } + else + { + i3 = 1; + i4 = 0; + } - orient_points->clear(); - orient_points->points.push_back(corners_3D[i2]); - draw3dPoints(orient_points, cloud.header.frame_id, 2, id+2000, 0.008); + ARCloud::Ptr orient_points(new ARCloud()); + orient_points->points.push_back(corners_3D[i1]); + draw3dPoints(orient_points, cloud.header.frame_id, 3, id+1000, 0.008); - int succ; - succ = ata::extractOrientation(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], pose.pose.orientation); - if(succ < 0) return -1; + orient_points->clear(); + orient_points->points.push_back(corners_3D[i2]); + draw3dPoints(orient_points, cloud.header.frame_id, 2, id+2000, 0.008); - tf::Matrix3x3 mat; - succ = ata::extractFrame(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], mat); - if(succ < 0) return -1; + int succ; + succ = ata::extractOrientation(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], pose.pose.orientation); + if(succ < 0) return -1; - drawArrow(pose.pose.position, mat, cloud.header.frame_id, 1, id); + tf2::Matrix3x3 mat; + succ = ata::extractFrame(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], mat); + if(succ < 0) return -1; - p.translation[0] = pose.pose.position.x * 100.0; - p.translation[1] = pose.pose.position.y * 100.0; - p.translation[2] = pose.pose.position.z * 100.0; - p.quaternion[1] = pose.pose.orientation.x; - p.quaternion[2] = pose.pose.orientation.y; - p.quaternion[3] = pose.pose.orientation.z; - p.quaternion[0] = pose.pose.orientation.w; + drawArrow(pose.pose.position, mat, cloud.header.frame_id, 1, id); - return 0; -} + p.translation[0] = pose.pose.position.x * 100.0; + 
p.translation[1] = pose.pose.position.y * 100.0;
+      p.translation[2] = pose.pose.position.z * 100.0;
+      p.quaternion[1] = pose.pose.orientation.x;
+      p.quaternion[2] = pose.pose.orientation.y;
+      p.quaternion[3] = pose.pose.orientation.z;
+      p.quaternion[0] = pose.pose.orientation.w;
+      return 0;
+    }
 
-void GetMarkerPoses(IplImage *image, ARCloud &cloud) {
+    void GetMarkerPoses(cv::Mat * image, ARCloud &cloud)
+    {
-  //Detect and track the markers
-  if (marker_detector.Detect(image, cam, true, false, max_new_marker_error,
-                             max_track_error, CVSEQ, true))
+      //Detect and track the markers
+      if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true))
       {
-    ROS_DEBUG_STREAM("--------------------------");
-    for (size_t i=0; i<marker_detector.markers->size(); i++)
-    {
-      vector<cv::Point, Eigen::aligned_allocator<cv::Point> > pixels;
-      Marker *m = &((*marker_detector.markers)[i]);
-      int id = m->GetId();
-      ROS_DEBUG_STREAM("******* ID: " << id);
-
-      int resol = m->GetRes();
-      int ori = m->ros_orientation;
-
-      PointDouble pt1, pt2, pt3, pt4;
-      pt4 = m->ros_marker_points_img[0];
-      pt3 = m->ros_marker_points_img[resol-1];
-      pt1 = m->ros_marker_points_img[(resol*resol)-resol];
-      pt2 = m->ros_marker_points_img[(resol*resol)-1];
-
-      m->ros_corners_3D[0] = cloud(pt1.x, pt1.y);
-      m->ros_corners_3D[1] = cloud(pt2.x, pt2.y);
-      m->ros_corners_3D[2] = cloud(pt3.x, pt3.y);
-      m->ros_corners_3D[3] = cloud(pt4.x, pt4.y);
-
-      if(ori >= 0 && ori < 4){
-        if(ori != 0){
-          std::rotate(m->ros_corners_3D.begin(), m->ros_corners_3D.begin() + ori, m->ros_corners_3D.end());
-        }
-      }
-      else
-        RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id);
-
-      //Get the 3D marker points
-      BOOST_FOREACH (const PointDouble& p, m->ros_marker_points_img)
-        pixels.push_back(cv::Point(p.x, p.y));
-      ARCloud::Ptr selected_points = ata::filterCloud(cloud, pixels);
-
-      //Use the kinect data to find a plane and pose for the marker
-      int ret = PlaneFitPoseImprovement(i, m->ros_corners_3D, selected_points, cloud, m->pose);
-    }
-  }
-}
-
+
+        std::cout << "--------------------------" << std::endl;
+        for (size_t i=0; i<marker_detector.markers->size(); i++)
+        {
+          vector<cv::Point, Eigen::aligned_allocator<cv::Point> > pixels;
+          Marker *m = &((*marker_detector.markers)[i]);
+          int id = m->GetId();
+          std::cout << "******* ID: " << id << std::endl;
+
+          int resol = m->GetRes();
+          int ori = m->ros_orientation;
+
+          PointDouble pt1, pt2, pt3, pt4;
+          pt4 = m->ros_marker_points_img[0];
+          pt3 = m->ros_marker_points_img[resol-1];
+          pt1 = m->ros_marker_points_img[(resol*resol)-resol];
+          pt2 = m->ros_marker_points_img[(resol*resol)-1];
+
+          m->ros_corners_3D[0] = cloud(pt1.x, pt1.y);
+          m->ros_corners_3D[1] = cloud(pt2.x, pt2.y);
+          m->ros_corners_3D[2] = cloud(pt3.x, pt3.y);
+          m->ros_corners_3D[3] = cloud(pt4.x, pt4.y);
+
+          if(ori >= 0 && ori < 4)
+          {
+            if(ori != 0)
+            {
+              std::rotate(m->ros_corners_3D.begin(), m->ros_corners_3D.begin() + ori, m->ros_corners_3D.end());
+            }
+          }
+          else
+          {
+            RCLCPP_ERROR(this->get_logger(), "FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id);
+          }
+
+          // Note: the point extraction and plane fit run for every marker,
+          // not only when a bad orientation was reported.
+          //Get the 3D marker points
+          BOOST_FOREACH (const PointDouble& p, m->ros_marker_points_img)
+          {
+            pixels.push_back(cv::Point(p.x, p.y));
+          }
+          ARCloud::Ptr selected_points = ata::filterCloud(cloud, pixels);
+
+          //Use the kinect data to find a plane and pose for the marker
+          int ret = PlaneFitPoseImprovement(i, m->ros_corners_3D, selected_points, cloud, m->pose);
+        }
+      }
+    }
+
+    void getPointCloudCallback (const sensor_msgs::msg::PointCloud2::SharedPtr msg)
+    {
+      sensor_msgs::msg::Image image_msg;
 
-void getPointCloudCallback (const
sensor_msgs::msg::PointCloud2::ConstSharedPtr &msg) -{ - sensor_msgs::msg::ImagePtr image_msg(new sensor_msgs::msg::Image); + // If desired, use the frame in the message's header. + if (output_frame_from_msg) + output_frame = msg->header.frame_id; - // If desired, use the frame in the message's header. - if (output_frame_from_msg) - output_frame = msg->header.frame_id; + //If we've already gotten the cam info, then go ahead + if(cam->getCamInfo_){ + //Convert cloud to PCL + ARCloud cloud; + pcl::fromROSMsg(*msg, cloud); + //Get an OpenCV image from the cloud + pcl::toROSMsg (*msg, image_msg); - //If we've already gotten the cam info, then go ahead - if(cam->getCamInfo_){ - //Convert cloud to PCL - ARCloud cloud; - pcl::fromROSMsg(*msg, cloud); - //Get an OpenCV image from the cloud - pcl::toROSMsg (*msg, *image_msg); - image_msg->header.stamp = msg->header.stamp; - image_msg->header.frame_id = msg->header.frame_id; + image_msg.header.stamp = msg->header.stamp; + image_msg.header.frame_id = msg->header.frame_id; - //Convert the image - cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); + //Convert the image + cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); - //Get the estimated pose of the main markers by using all the markers in each bundle + //Get the estimated pose of the main markers by using all the markers in each bundle - // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives - // us a cv::Mat. I'm too lazy to change to cv::Mat throughout right now, so I - // do this conversion here -jbinney - IplImage ipl_image = cv_ptr_->image; + // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives + // us a cv::Mat. I'm too lazy to change to cv::Mat throughout right now, so I + // do this conversion here -jbinney + cv::Mat ipl_image = cv_ptr_->image; - //Use the kinect to improve the pose - Pose ret_pose; - GetMarkerPoses(&ipl_image, cloud); + //Use the kinect to improve the pose + Pose ret_pose; + GetMarkerPoses(&ipl_image, cloud); - tf2::StampedTransform CamToOutput; - if (image_msg->header.frame_id == output_frame) { - CamToOutput.setIdentity(); - } else { + geometry_msgs::msg::TransformStamped CamToOutput; try { - tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, - image_msg->header.stamp, rclcpp::Duration(1.0)); - tf_listener->lookupTransform(output_frame, image_msg->header.frame_id, - image_msg->header.stamp, CamToOutput); - } catch (tf::TransformException ex) { - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); + tf2_.canTransform(output_frame, image_msg.header.frame_id, image_msg.header.stamp,rclcpp::Duration(1.0)); + CamToOutput = tf2_.lookupTransform(output_frame, image_msg.header.frame_id, image_msg.header.stamp, rclcpp::Duration(1.0)); + } catch (tf2::TransformException ex) { + RCLCPP_ERROR(this->get_logger(), "%s",ex.what()); } - } - - try{ - + try + { arPoseMarkers_.markers.clear (); for (size_t i=0; isize(); i++) - { - //Get the pose relative to the camera - int id = (*(marker_detector.markers))[i].GetId(); - Pose p = (*(marker_detector.markers))[i].pose; - - double px = p.translation[0]/100.0; - double py = p.translation[1]/100.0; - double pz = p.translation[2]/100.0; - double qx = p.quaternion[1]; - double qy = p.quaternion[2]; - double qz = p.quaternion[3]; - double qw = p.quaternion[0]; - - tf::Quaternion rotation (qx,qy,qz,qw); - tf::Vector3 origin (px,py,pz); - tf::Transform t (rotation, origin); - tf::Vector3 markerOrigin 
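// A minimal standalone sketch (not from this patch) of the conversion pattern
// used in this callback, assuming an organized pcl::PointXYZRGB cloud such as
// the Kinect driver publishes; the helper name cloudToBgr is illustrative.
#include <cv_bridge/cv_bridge.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl_conversions/pcl_conversions.h>
#include <sensor_msgs/msg/image.hpp>
#include <sensor_msgs/msg/point_cloud2.hpp>

cv::Mat cloudToBgr(const sensor_msgs::msg::PointCloud2 &msg,
                   pcl::PointCloud<pcl::PointXYZRGB> &cloud_out)
{
  pcl::fromROSMsg(msg, cloud_out);  // PCL cloud for the plane fit
  sensor_msgs::msg::Image image_msg;
  pcl::toROSMsg(msg, image_msg);    // pull the RGB channels out of the cloud
  return cv_bridge::toCvCopy(image_msg, "bgr8")->image;  // cv::Mat for Alvar
}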
(0, 0, 0); - tf::Transform m (tf::Quaternion::getIdentity (), markerOrigin); - tf::Transform markerPose = t * m; // marker pose in the camera frame - - //Publish the transform from the camera to the marker - std::string markerFrame = "ar_marker_"; - std::stringstream out; - out << id; - std::string id_string = out.str(); - markerFrame += id_string; - tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); - tf_broadcaster->sendTransform(camToMarker); - - //Create the rviz visualization messages - tf::poseTFToMsg (markerPose, rvizMarker_.pose); - rvizMarker_.header.frame_id = image_msg->header.frame_id; - rvizMarker_.header.stamp = image_msg->header.stamp; - rvizMarker_.id = id; - - rvizMarker_.scale.x = 1.0 * marker_size/100.0; - rvizMarker_.scale.y = 1.0 * marker_size/100.0; - rvizMarker_.scale.z = 0.2 * marker_size/100.0; - rvizMarker_.ns = "basic_shapes"; - rvizMarker_.type = visualization_msgs::msg::Marker::CUBE; - rvizMarker_.action = visualization_msgs::msg::Marker::ADD; - switch (id) - { - case 0: - rvizMarker_.color.r = 0.0f; - rvizMarker_.color.g = 0.0f; - rvizMarker_.color.b = 1.0f; - rvizMarker_.color.a = 1.0; - break; - case 1: - rvizMarker_.color.r = 1.0f; - rvizMarker_.color.g = 0.0f; - rvizMarker_.color.b = 0.0f; - rvizMarker_.color.a = 1.0; - break; - case 2: - rvizMarker_.color.r = 0.0f; - rvizMarker_.color.g = 1.0f; - rvizMarker_.color.b = 0.0f; - rvizMarker_.color.a = 1.0; - break; - case 3: - rvizMarker_.color.r = 0.0f; - rvizMarker_.color.g = 0.5f; - rvizMarker_.color.b = 0.5f; - rvizMarker_.color.a = 1.0; - break; - case 4: - rvizMarker_.color.r = 0.5f; - rvizMarker_.color.g = 0.5f; - rvizMarker_.color.b = 0.0; - rvizMarker_.color.a = 1.0; - break; - default: - rvizMarker_.color.r = 0.5f; - rvizMarker_.color.g = 0.0f; - rvizMarker_.color.b = 0.5f; - rvizMarker_.color.a = 1.0; - break; - } - rvizMarker_.lifetime = rclcpp::Duration (1.0); - rvizMarkerPub_.publish (rvizMarker_); - - //Get the pose of the tag in the camera frame, then the output frame (usually torso) - tf::Transform tagPoseOutput = CamToOutput * markerPose; - - //Create the pose marker messages - ar_track_alvar_msgs::AlvarMarker ar_pose_marker; - tf::poseTFToMsg (tagPoseOutput, ar_pose_marker.pose.pose); - ar_pose_marker.header.frame_id = output_frame; - ar_pose_marker.header.stamp = image_msg->header.stamp; - ar_pose_marker.id = id; - arPoseMarkers_.markers.push_back (ar_pose_marker); - } - arPoseMarkers_.header.stamp = image_msg->header.stamp; - arMarkerPub_.publish (arPoseMarkers_); + { + //Get the pose relative to the camera + int id = (*(marker_detector.markers))[i].GetId(); + Pose p = (*(marker_detector.markers))[i].pose; + + double px = p.translation[0]/100.0; + double py = p.translation[1]/100.0; + double pz = p.translation[2]/100.0; + double qx = p.quaternion[1]; + double qy = p.quaternion[2]; + double qz = p.quaternion[3]; + double qw = p.quaternion[0]; + + tf2::Quaternion rotation (qx,qy,qz,qw); + tf2::Vector3 origin (px,py,pz); + tf2::Transform t (rotation, origin); + tf2::Vector3 markerOrigin (0, 0, 0); + tf2::Transform m (tf2::Quaternion::getIdentity (), markerOrigin); + tf2::Transform markerPose = t * m; // marker pose in the camera frame + + //Publish the transform from the camera to the marker + std::string markerFrame = "ar_marker_"; + std::stringstream out; + out << id; + std::string id_string = out.str(); + markerFrame += id_string; + geometry_msgs::msg::TransformStamped camToMarker; + camToMarker.header.stamp = 
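// The canTransform/lookupTransform pair above can be collapsed: canTransform's
// return value is discarded, and lookupTransform already blocks for the given
// timeout. A sketch using the tf2_ member from this file; note that
// rclcpp::Duration(1.0) relies on a floating-point constructor that not every
// distro provides, while the (seconds, nanoseconds) form is unambiguous.
geometry_msgs::msg::TransformStamped CamToOutput;
try
{
  CamToOutput = tf2_.lookupTransform(output_frame, image_msg.header.frame_id,
                                     image_msg.header.stamp,
                                     rclcpp::Duration(1, 0));  // 1 s timeout
}
catch (const tf2::TransformException &ex)
{
  RCLCPP_ERROR(this->get_logger(), "%s", ex.what());
}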
image_msg.header.stamp; + camToMarker.header.frame_id = image_msg.header.frame_id; + camToMarker.child_frame_id = markerFrame; + + geometry_msgs::msg::Vector3 trans; + trans.x = px; + trans.y = py; + trans.z = pz; + geometry_msgs::msg::Quaternion rot; + rot.x = qx; + rot.y = qy; + rot.z = qz; + rot.w = qw; + + + camToMarker.transform.translation = trans; + camToMarker.transform.rotation = rot; + tf_broadcaster_.sendTransform(camToMarker); + + + //Create the rviz visualization messages + rvizMarker_.pose.position.x = markerPose.getOrigin().getX(); + rvizMarker_.pose.position.y = markerPose.getOrigin().getY(); + rvizMarker_.pose.position.z = markerPose.getOrigin().getZ(); + rvizMarker_.pose.orientation.x = markerPose.getRotation().getX(); + rvizMarker_.pose.orientation.y = markerPose.getRotation().getY(); + rvizMarker_.pose.orientation.z = markerPose.getRotation().getZ(); + rvizMarker_.pose.orientation.w = markerPose.getRotation().getW(); + + rvizMarker_.header.frame_id = image_msg.header.frame_id; + rvizMarker_.header.stamp = image_msg.header.stamp; + rvizMarker_.id = id; + + rvizMarker_.scale.x = 1.0 * marker_size/100.0; + rvizMarker_.scale.y = 1.0 * marker_size/100.0; + rvizMarker_.scale.z = 0.2 * marker_size/100.0; + rvizMarker_.ns = "basic_shapes"; + rvizMarker_.type = visualization_msgs::msg::Marker::CUBE; + rvizMarker_.action = visualization_msgs::msg::Marker::ADD; + switch (id) + { + case 0: + rvizMarker_.color.r = 0.0f; + rvizMarker_.color.g = 0.0f; + rvizMarker_.color.b = 1.0f; + rvizMarker_.color.a = 1.0; + break; + case 1: + rvizMarker_.color.r = 1.0f; + rvizMarker_.color.g = 0.0f; + rvizMarker_.color.b = 0.0f; + rvizMarker_.color.a = 1.0; + break; + case 2: + rvizMarker_.color.r = 0.0f; + rvizMarker_.color.g = 1.0f; + rvizMarker_.color.b = 0.0f; + rvizMarker_.color.a = 1.0; + break; + case 3: + rvizMarker_.color.r = 0.0f; + rvizMarker_.color.g = 0.5f; + rvizMarker_.color.b = 0.5f; + rvizMarker_.color.a = 1.0; + break; + case 4: + rvizMarker_.color.r = 0.5f; + rvizMarker_.color.g = 0.5f; + rvizMarker_.color.b = 0.0; + rvizMarker_.color.a = 1.0; + break; + default: + rvizMarker_.color.r = 0.5f; + rvizMarker_.color.g = 0.0f; + rvizMarker_.color.b = 0.5f; + rvizMarker_.color.a = 1.0; + break; + } + + rvizMarker_.lifetime = rclcpp::Duration (1.0); + rvizMarkerPub_->publish (rvizMarker_); + + //Get the pose of the tag in the camera frame, then the output frame (usually torso) + + + + // tf2::Transform tagPoseOutput = CamToOutput * markerPose; + //Create the pose marker messages + ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; + tf2::doTransform(ar_pose_marker.pose, ar_pose_marker.pose,CamToOutput); + ar_pose_marker.header.frame_id = output_frame; + ar_pose_marker.header.stamp = image_msg.header.stamp; + ar_pose_marker.id = id; + arPoseMarkers_.markers.push_back (ar_pose_marker); } - catch (cv_bridge::Exception& e){ - ROS_ERROR ("Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ()); + arPoseMarkers_.header.stamp = image_msg.header.stamp; + arMarkerPub_->publish (arPoseMarkers_); + } + catch (cv_bridge::Exception& e) + { + RCLCPP_ERROR(this->get_logger(),"Could not convert from '%s' to 'rgb8'.", image_msg.encoding.c_str ()); } } -} - -void configCallback(ar_track_alvar::ParamsConfig &config, uint32_t level) -{ - RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "AR tracker reconfigured: %s %.2f %.2f %.2f %.2f", config.enabled ? 
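// Note on the doTransform call above: ar_pose_marker has just been
// default-constructed, so its pose is identity and the markerPose computed
// earlier is never copied in; the published pose is only CamToOutput applied
// to identity. A sketch of the intended order, filling the pose from
// markerPose before transforming it into the output frame:
geometry_msgs::msg::PoseStamped marker_pose_cam;
marker_pose_cam.header.frame_id = image_msg.header.frame_id;
marker_pose_cam.header.stamp = image_msg.header.stamp;
tf2::toMsg(markerPose, marker_pose_cam.pose);  // tf2::Transform -> Pose msg
tf2::doTransform(marker_pose_cam, ar_pose_marker.pose, CamToOutput);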
"ENABLED" : "DISABLED", - config.max_frequency, config.marker_size, config.max_new_marker_error, config.max_track_error); - - enableSwitched = enabled != config.enabled; - - enabled = config.enabled; - max_frequency = config.max_frequency; - marker_size = config.marker_size; - max_new_marker_error = config.max_new_marker_error; - max_track_error = config.max_track_error; -} + } +}; -int main(int argc, char *argv[]) -{ - ros::init (argc, argv, "marker_detect"); - rclcpp::Node n, pn("~"); - - if(argc > 1) { - RCLCPP_WARN(rclcpp::get_logger("ArTrackAlvar"), "Command line arguments are deprecated. Consider using ROS parameters and remappings."); - - if(argc < 7){ - std::cout << std::endl; - cout << "Not enough arguments provided." << endl; - cout << "Usage: ./individualMarkers " - << " [ ]"; - std::cout << std::endl; - return 0; - } - // Get params from command line - marker_size = atof(argv[1]); - max_new_marker_error = atof(argv[2]); - max_track_error = atof(argv[3]); - cam_image_topic = argv[4]; - cam_info_topic = argv[5]; - output_frame = argv[6]; - - if (argc > 7) { - max_frequency = atof(argv[7]); - pn.setParam("max_frequency", max_frequency); - } - if (argc > 8) - marker_resolution = atoi(argv[8]); - if (argc > 9) - marker_margin = atoi(argv[9]); - - } else { - // Get params from ros param server. - pn.param("marker_size", marker_size, 10.0); - pn.param("max_new_marker_error", max_new_marker_error, 0.08); - pn.param("max_track_error", max_track_error, 0.2); - pn.param("max_frequency", max_frequency, 8.0); - pn.setParam("max_frequency", max_frequency); // in case it was not set. - pn.param("marker_resolution", marker_resolution, 5); - pn.param("marker_margin", marker_margin, 2); - pn.param("output_frame_from_msg", output_frame_from_msg, false); - - if (!output_frame_from_msg && !pn.getParam("output_frame", output_frame)) { - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "Param 'output_frame' has to be set if the output frame is not " - "derived from the point cloud message."); - exit(EXIT_FAILURE); - } - // Camera input topics. Use remapping to map to your camera topics. - cam_image_topic = "camera_image"; - cam_info_topic = "camera_info"; - } - // Set dynamically configurable parameters so they don't get replaced by default values - pn.setParam("marker_size", marker_size); - pn.setParam("max_new_marker_error", max_new_marker_error); - pn.setParam("max_track_error", max_track_error); - marker_detector.SetMarkerSize(marker_size, marker_resolution, marker_margin); - cam = new Camera(n, cam_info_topic); - if (!output_frame_from_msg) { - // TF listener is only required when output frame != camera frame. 
- tf_listener = new tf2_ros::TransformListener(n); - } - tf_broadcaster = new tf::TransformBroadcaster(); - arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::msg::Marker > ("visualization_marker", 0); - rvizMarkerPub2_ = n.advertise < visualization_msgs::msg::Marker > ("ARmarker_points", 0); - // Prepare dynamic reconfiguration - dynamic_reconfigure::Server < ar_track_alvar::ParamsConfig > server; - dynamic_reconfigure::Server::CallbackType f; - f = boost::bind(&configCallback, _1, _2); - server.setCallback(f); - //Give tf a chance to catch up before the camera callback starts asking for transforms - // It will also reconfigure parameters for the first time, setting the default values - rclcpp::Duration(1.0).sleep(); - rclcpp::spin_some(node); - if (enabled == true) - { - // This always happens, as enable is true by default - RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "Subscribing to image topic"); - cloud_sub_ = n.subscribe(cam_image_topic, 1, &getPointCloudCallback); - } - // Run at the configured rate, discarding pointcloud msgs if necessary - rclcpp::Rate rate(max_frequency); - while (rclcpp::ok()) - { - rclcpp::spin_some(node); - rate.sleep(); - if (std::abs((rate.expectedCycleTime() - rclcpp::Duration(1.0 / max_frequency)).toSec()) > 0.001) - { - // Change rate dynamically; if must be above 0, as 0 will provoke a segfault on next spinOnce - RCLCPP_DEBUG(rclcpp::get_logger("ArTrackAlvar"), "Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); - rate = rclcpp::Rate(max_frequency); - } - if (enableSwitched == true) - { - // Enable/disable switch: subscribe/unsubscribe to make use of pointcloud processing nodelet - // lazy publishing policy; in CPU-scarce computer as TurtleBot's laptop this is a huge saving - if (enabled == false) - cloud_sub_.shutdown(); - else - cloud_sub_ = n.subscribe(cam_image_topic, 1, &getPointCloudCallback); - enableSwitched = false; - } - } +int main(int argc, char *argv[]) +{ + rclcpp::init(argc, argv); + rclcpp::spin(std::make_shared(argc,argv)); + rclcpp::shutdown(); return 0; + // rclcpp::Node n, pn("~"); + + + // // Run at the configured rate, discarding pointcloud msgs if necessary + // rclcpp::Rate rate(max_frequency); + + // while (rclcpp::ok()) + // { + // rclcpp::spin_some(node); + // rate.sleep(); + + // if (std::abs((rate.expectedCycleTime() - rclcpp::Duration(1.0 / max_frequency)).toSec()) > 0.001) + // { + // // Change rate dynamically; if must be above 0, as 0 will provoke a segfault on next spinOnce + // RCLCPP_DEBUG(rclcpp::get_logger("ArTrackAlvar"), "Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); + // rate = rclcpp::Rate(max_frequency); + // } + + // if (enableSwitched == true) + // { + // // Enable/disable switch: subscribe/unsubscribe to make use of pointcloud processing nodelet + // // lazy publishing policy; in CPU-scarce computer as TurtleBot's laptop this is a huge saving + // if (enabled == false) + // cloud_sub_.shutdown(); + // else + // cloud_sub_ = n.subscribe(cam_image_topic, 1, &getPointCloudCallback); + + // enableSwitched = false; + // } + // } + + // return 0; } diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index 56f46df..7d9627e 100644 --- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -47,7 +47,7 @@ #include 
#include // #include -#include +// #include using namespace alvar; using namespace std; diff --git a/ar_track_alvar/package.xml b/ar_track_alvar/package.xml index 65e1674..e56a1df 100644 --- a/ar_track_alvar/package.xml +++ b/ar_track_alvar/package.xml @@ -24,6 +24,7 @@ This package is a ROS wrapper for Alvar, an open source AR tag tracking library. std_msgs tf2 tf2_ros +tf2_geometry_msgs tinyxml2 visualization_msgs libopencv-dev diff --git a/ar_track_alvar/src/Camera.cpp b/ar_track_alvar/src/Camera.cpp index 1d05c13..3fe016c 100644 --- a/ar_track_alvar/src/Camera.cpp +++ b/ar_track_alvar/src/Camera.cpp @@ -180,13 +180,13 @@ void Camera::SetSimpleCalib(int _x_res, int _y_res, double f_fac) } bool Camera::LoadCalibXML(const char *calibfile) { - TiXmlDocument document; + tinyxml2::XMLDocument document; if (!document.LoadFile(calibfile)) return false; - TiXmlElement *xml_root = document.RootElement(); + tinyxml2::XMLElement *xml_root = document.RootElement(); return - xml_root->QueryIntAttribute("width", &calib_x_res) == TIXML_SUCCESS && - xml_root->QueryIntAttribute("height", &calib_y_res) == TIXML_SUCCESS && + xml_root->QueryIntAttribute("width", &calib_x_res) == EXIT_SUCCESS && + xml_root->QueryIntAttribute("height", &calib_y_res) == EXIT_SUCCESS && FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("intrinsic_matrix"), &calib_K) && FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("distortion"), &calib_D); } @@ -336,10 +336,13 @@ bool Camera::SetCalib(const char *calibfile, int _x_res, int _y_res, FILE_FORMAT } bool Camera::SaveCalibXML(const char *calibfile) { - TiXmlDocument document; - document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no")); - document.LinkEndChild(new TiXmlElement("camera")); - TiXmlElement *xml_root = document.RootElement(); + tinyxml2::XMLDocument document; + tinyxml2::XMLDeclaration* declaration=document.NewDeclaration(); + document.InsertFirstChild(declaration); + tinyxml2::XMLElement * temp; + temp->SetName("Camera"); + document.LinkEndChild(temp); + tinyxml2::XMLElement *xml_root = document.RootElement(); xml_root->SetAttribute("width", calib_x_res); xml_root->SetAttribute("height", calib_y_res); xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("intrinsic_matrix", &calib_K)); diff --git a/ar_track_alvar/src/FileFormatUtils.cpp b/ar_track_alvar/src/FileFormatUtils.cpp index 5444546..4b597a9 100644 --- a/ar_track_alvar/src/FileFormatUtils.cpp +++ b/ar_track_alvar/src/FileFormatUtils.cpp @@ -28,19 +28,19 @@ namespace alvar { - bool FileFormatUtils::decodeXMLMatrix(const TiXmlElement *xml_matrix, int &type, int &rows, int &cols) { + bool FileFormatUtils::decodeXMLMatrix(const tinyxml2::XMLElement *xml_matrix, int &type, int &rows, int &cols) { const char * xml_type = xml_matrix->Attribute("type"); if (strcmp("CV_32F", xml_type) == 0) type = CV_32F; else if (strcmp("CV_64F", xml_type) == 0) type = CV_64F; else return false; - if (xml_matrix->QueryIntAttribute("rows", &rows) != TIXML_SUCCESS) return false; - if (xml_matrix->QueryIntAttribute("cols", &cols) != TIXML_SUCCESS) return false; + if (xml_matrix->QueryIntAttribute("rows", &rows) != EXIT_SUCCESS) return false; + if (xml_matrix->QueryIntAttribute("cols", &cols) != EXIT_SUCCESS) return false; return true; } - CvMat* FileFormatUtils::allocateXMLMatrix(const TiXmlElement *xml_matrix) { + CvMat* FileFormatUtils::allocateXMLMatrix(const tinyxml2::XMLElement *xml_matrix) { if (!xml_matrix) return NULL; int type, rows, cols; @@ -49,7 +49,7 @@ namespace alvar { return 
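// Porting note on the tinyxml2 calls above: XMLDocument::LoadFile returns an
// XMLError where XML_SUCCESS == 0, so `if (!document.LoadFile(...)) return
// false;` rejects exactly the successful case, and comparing Query*Attribute
// results against EXIT_SUCCESS is only correct by coincidence. New elements
// must also come from the document allocator: dereferencing the uninitialized
// `temp` pointer in SaveCalibXML is undefined behaviour, and tinyxml2 has no
// LinkEndChild (InsertEndChild is the equivalent). A sketch:
tinyxml2::XMLDocument document;
if (document.LoadFile(calibfile) != tinyxml2::XML_SUCCESS)
  return false;
tinyxml2::XMLElement *xml_root = document.RootElement();
if (xml_root->QueryIntAttribute("width", &calib_x_res) != tinyxml2::XML_SUCCESS)
  return false;

// ...and when saving, create the root element through the document:
tinyxml2::XMLElement *root = document.NewElement("camera");
document.InsertEndChild(root);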
cvCreateMat(rows, cols, type); } - bool FileFormatUtils::parseXMLMatrix(const TiXmlElement *xml_matrix, CvMat *matrix) { + bool FileFormatUtils::parseXMLMatrix(const tinyxml2::XMLElement *xml_matrix, CvMat *matrix) { if (!xml_matrix || !matrix) return false; int type, rows, cols; @@ -59,23 +59,24 @@ namespace alvar { if (rows != matrix->rows) return false; if (cols != matrix->cols) return false; - const TiXmlElement *xml_data = xml_matrix->FirstChildElement("data"); + const tinyxml2::XMLElement *xml_data = xml_matrix->FirstChildElement("data"); for (int r = 0; r < matrix->rows; ++r) { for (int c = 0; c < matrix->cols; ++c) { if (!xml_data) return false; double value = atof(xml_data->GetText()); cvSetReal2D(matrix, r, c, value); - xml_data = (const TiXmlElement *) xml_data->NextSibling("data"); + xml_data = (const tinyxml2::XMLElement *) xml_data->NextSiblingElement("data"); } } return true; } - TiXmlElement* FileFormatUtils::createXMLMatrix(const char* element_name, const CvMat *matrix) { + tinyxml2::XMLElement * FileFormatUtils::createXMLMatrix(const char* element_name, const CvMat *matrix) { if (!matrix) return NULL; - TiXmlElement* xml_matrix = new TiXmlElement(element_name); + tinyxml2::XMLElement * xml_matrix; + xml_matrix->SetName(element_name); int precision; if (cvGetElemType(matrix) == CV_32F) { xml_matrix->SetAttribute("type", "CV_32F"); @@ -86,7 +87,7 @@ namespace alvar { precision = std::numeric_limits::digits10 + 2; } else { - delete xml_matrix; + //delete xml_matrix; return NULL; } @@ -95,12 +96,18 @@ namespace alvar { for (int r = 0; r < matrix->rows; ++r) { for (int c = 0; c < matrix->cols; ++c) { - TiXmlElement *xml_data = new TiXmlElement("data"); + tinyxml2::XMLElement *xml_data; + xml_data->SetName("data"); + + + xml_matrix->LinkEndChild(xml_data); std::stringstream ss; ss.precision(precision); ss<LinkEndChild(new TiXmlText(ss.str().c_str())); + + + xml_data->SetText(ss.str().c_str()); } } return xml_matrix; diff --git a/ar_track_alvar/src/MultiMarker.cpp b/ar_track_alvar/src/MultiMarker.cpp index 7158faf..dbd275f 100644 --- a/ar_track_alvar/src/MultiMarker.cpp +++ b/ar_track_alvar/src/MultiMarker.cpp @@ -54,28 +54,36 @@ void MultiMarker::Reset() } bool MultiMarker::SaveXML(const char* fname) { - TiXmlDocument document; - document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no")); - document.LinkEndChild(new TiXmlElement("multimarker")); - TiXmlElement *xml_root = document.RootElement(); + tinyxml2::XMLDocument document; + tinyxml2::XMLDeclaration* declaration=document.NewDeclaration(); + document.InsertFirstChild(declaration); + tinyxml2::XMLElement * temp; + temp->SetName("multimarker"); + document.LinkEndChild(temp); + tinyxml2::XMLElement *xml_root = document.RootElement(); int n_markers = marker_indices.size(); xml_root->SetAttribute("markers", n_markers); for(int i = 0; i < n_markers; ++i) { - TiXmlElement *xml_marker = new TiXmlElement("marker"); + tinyxml2::XMLElement *xml_marker; + xml_marker->SetName("marker"); + + + xml_root->LinkEndChild(xml_marker); xml_marker->SetAttribute("index", marker_indices[i]); xml_marker->SetAttribute("status", marker_status[i]); for(int j = 0; j < 4; ++j) { - TiXmlElement *xml_corner = new TiXmlElement("corner"); + tinyxml2::XMLElement *xml_corner; + xml_corner->SetName("corner"); xml_marker->LinkEndChild(xml_corner); cv::Point3f X = pointcloud[pointcloud_index(marker_indices[i], j)]; - xml_corner->SetDoubleAttribute("x", X.x); - xml_corner->SetDoubleAttribute("y", X.y); - xml_corner->SetDoubleAttribute("z", X.z); + 
xml_corner->DoubleAttribute("x", X.x); + xml_corner->DoubleAttribute("y", X.y); + xml_corner->DoubleAttribute("z", X.z); } } return document.SaveFile(fname); @@ -125,42 +133,42 @@ bool MultiMarker::Save(const char* fname, FILE_FORMAT format) { } bool MultiMarker::LoadXML(const char* fname) { - TiXmlDocument document; + tinyxml2::XMLDocument document; if (!document.LoadFile(fname)) return false; - TiXmlElement *xml_root = document.RootElement(); + tinyxml2::XMLElement *xml_root = document.RootElement(); int n_markers; - if (xml_root->QueryIntAttribute("markers", &n_markers) != TIXML_SUCCESS) return false; + if (xml_root->QueryIntAttribute("markers", &n_markers) != EXIT_SUCCESS) return false; pointcloud.clear(); marker_indices.resize(n_markers); marker_status.resize(n_markers); - TiXmlElement *xml_marker = xml_root->FirstChildElement("marker"); + tinyxml2::XMLElement *xml_marker = xml_root->FirstChildElement("marker"); for(int i = 0; i < n_markers; ++i) { if (!xml_marker) return false; int index, status; - if (xml_marker->QueryIntAttribute("index", &index) != TIXML_SUCCESS) return false; - if (xml_marker->QueryIntAttribute("status", &status) != TIXML_SUCCESS) return false; + if (xml_marker->QueryIntAttribute("index", &index) != EXIT_SUCCESS) return false; + if (xml_marker->QueryIntAttribute("status", &status) != EXIT_SUCCESS) return false; marker_indices[i] = index; marker_status[i] = status; if(i==0) master_id = index; - TiXmlElement *xml_corner = xml_marker->FirstChildElement("corner"); + tinyxml2::XMLElement *xml_corner = xml_marker->FirstChildElement("corner"); for(int j = 0; j < 4; ++j) { if (!xml_corner) return false; cv::Point3f X; - if (xml_corner->QueryDoubleAttribute("x", (double *)&X.x) != TIXML_SUCCESS) return false; - if (xml_corner->QueryDoubleAttribute("y", (double *)&X.y) != TIXML_SUCCESS) return false; - if (xml_corner->QueryDoubleAttribute("z", (double *)&X.z) != TIXML_SUCCESS) return false; + if (xml_corner->QueryDoubleAttribute("x", (double *)&X.x) != EXIT_SUCCESS) return false; + if (xml_corner->QueryDoubleAttribute("y", (double *)&X.y) != EXIT_SUCCESS) return false; + if (xml_corner->QueryDoubleAttribute("z", (double *)&X.z) != EXIT_SUCCESS) return false; pointcloud[pointcloud_index(marker_indices[i], j)] = X; - xml_corner = (TiXmlElement*)xml_corner->NextSibling("corner"); + xml_corner = (tinyxml2::XMLElement*)xml_corner->NextSiblingElement("corner"); } - xml_marker = (TiXmlElement*)xml_marker->NextSibling("marker"); + xml_marker = (tinyxml2::XMLElement*)xml_marker->NextSiblingElement("marker"); } return true; } diff --git a/ar_track_alvar/src/Util.cpp b/ar_track_alvar/src/Util.cpp index e0b4c37..4734fe1 100644 --- a/ar_track_alvar/src/Util.cpp +++ b/ar_track_alvar/src/Util.cpp @@ -355,21 +355,23 @@ int HistogramSubpixel::GetMax(double *dim0, double *dim1, double *dim2) { } struct SerializationFormatterXml { - TiXmlDocument document; - TiXmlElement *xml_current; + tinyxml2::XMLDocument document; + tinyxml2::XMLElement *xml_current; SerializationFormatterXml() : xml_current(0) {} }; bool Serialization::Output() { SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; if (filename.size() > 0) { - //xml->document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no")); - xml->document.InsertBeforeChild (xml->document.RootElement(), TiXmlDeclaration("1.0", "UTF-8", "no")); + tinyxml2::XMLDeclaration* declaration=xml->document.NewDeclaration(); + xml->document.InsertFirstChild(declaration); xml->document.SaveFile(filename.c_str()); } else { - const 
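// tinyxml2's XMLElement::DoubleAttribute(name, value) is a getter whose second
// argument is a fallback, so the three calls above read the corner attributes
// instead of writing them and SaveXML emits empty corner elements. The
// SetAttribute double overload is the tinyxml2 replacement for TiXml's
// SetDoubleAttribute (the same substitution applies to the Serialize(double&)
// change in Util.cpp below):
xml_corner->SetAttribute("x", X.x);
xml_corner->SetAttribute("y", X.y);
xml_corner->SetAttribute("z", X.z);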
TiXmlNode *node = (xml->xml_current ? xml->xml_current : xml->document.RootElement()); - std::basic_ostream *os = dynamic_cast *>(stream); - (*os)<<(*node); + std::cout << "Invalid Filename" << std::endl; + exit(0); + // const tinyxml2::XMLNode *node = (xml->xml_current ? xml->xml_current : xml->document.RootElement()); + // std::basic_ostream *os = dynamic_cast *>(stream); + // (*os)<<(*node); //(*stream)<<(*node); } return true; @@ -380,13 +382,15 @@ bool Serialization::Input() { if (filename.size() > 0) { xml->document.LoadFile(filename.c_str()); } else { - // TODO: How this should be handled with nested classes? - TiXmlNode *node = (xml->xml_current ? xml->xml_current : xml->document.RootElement()); - if (node == 0) { - node = (TiXmlElement*)xml->document.LinkEndChild(new TiXmlElement("root")); - } - std::basic_istream *is = dynamic_cast *>(stream); - (*is)>>(*node); + std::cout << "Invalid Filename" << std::endl; + exit(0); + // // TODO: How this should be handled with nested classes? + // tinyxml2::XMLNode *node = (xml->xml_current ? xml->xml_current : xml->document.RootElement()); + // if (node == 0) { + // node = (tinyxml2::XMLElement*)xml->document.LinkEndChild(new tinyxml2::XMLElement("root")); + // } + // std::basic_istream *is = dynamic_cast *>(stream); + // (*is)>>(*node); //(*stream)>>(*node); } return true; @@ -401,21 +405,23 @@ bool Serialization::Descend(const char *id) { return false; } } else { - xml->xml_current = (TiXmlElement*)xml->xml_current->FirstChild (id); + xml->xml_current = (tinyxml2::XMLElement*)xml->xml_current->FirstChildElement(id); if (xml->xml_current == NULL) return false; } } else { + tinyxml2::XMLElement* temp; + temp->SetName(id); if (xml->xml_current == 0) { - xml->xml_current = (TiXmlElement*)xml->document.LinkEndChild(new TiXmlElement(id)); + xml->xml_current = (tinyxml2::XMLElement*)xml->document.LinkEndChild(temp); } else { - xml->xml_current = (TiXmlElement*)xml->xml_current->LinkEndChild(new TiXmlElement(id)); + xml->xml_current = (tinyxml2::XMLElement*)xml->xml_current->LinkEndChild(temp); } } return true; } bool Serialization::Ascend() { SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - xml->xml_current = (TiXmlElement*)xml->xml_current->Parent(); + xml->xml_current = (tinyxml2::XMLElement*)xml->xml_current->Parent(); return true; } @@ -454,8 +460,8 @@ bool Serialization::Serialize(int &data, const std::string &name) { SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; if (!xml || !xml->xml_current) return false; bool ret = true; - if (input) ret = (xml->xml_current->QueryIntAttribute(name, &data) == TIXML_SUCCESS); - else xml->xml_current->SetAttribute(name, data); + if (input) ret = (xml->xml_current->QueryIntAttribute(name.c_str(), &data) == EXIT_SUCCESS); + else xml->xml_current->SetAttribute(name.c_str(), data); return ret; } @@ -477,8 +483,8 @@ bool Serialization::Serialize(unsigned long &data, const std::string &name) { bool Serialization::Serialize(double &data, const std::string &name) { SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; bool ret = true; - if (input) ret = (xml->xml_current->QueryDoubleAttribute(name, &data) == TIXML_SUCCESS); - else xml->xml_current->SetDoubleAttribute(name.c_str(), data); + if (input) ret = (xml->xml_current->QueryDoubleAttribute(name.c_str(), &data) == EXIT_SUCCESS); + else xml->xml_current->DoubleAttribute(name.c_str(), data); return ret; } @@ -498,7 +504,7 @@ bool Serialization::Serialize(CvMat &data, 
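// The stream path above is stubbed out with exit(0) because tinyxml2 has no
// operator<< for nodes, but tinyxml2::XMLPrinter can reproduce the old
// behaviour without killing the process; a sketch, where os is the
// std::basic_ostream<char>* obtained in the commented-out code:
tinyxml2::XMLPrinter printer;
xml->document.Print(&printer);
(*os) << printer.CStr();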
const std::string &name) { SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; bool ret = true; if (input) { - TiXmlElement *xml_matrix = (TiXmlElement*)xml->xml_current->FirstChild(name); + tinyxml2::XMLElement *xml_matrix = (tinyxml2::XMLElement*)xml->xml_current->FirstChildElement(name.c_str()); if (xml_matrix == NULL) return false; if (!FileFormatUtils::parseXMLMatrix(xml_matrix, &data)) return false; } From 5a5f369c01c6af01df4f4de4e829a278968443b3 Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Fri, 23 Jul 2021 16:32:23 -0500 Subject: [PATCH 07/27] Individual Marker Done? --- ar_track_alvar/CMakeLists.txt | 6 +- ar_track_alvar/nodes/IndividualMarkers.cpp | 47 -- .../nodes/IndividualMarkersNoKinect.cpp | 499 ++++++++++-------- 3 files changed, 275 insertions(+), 277 deletions(-) diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index ee00103..75dc6a1 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -128,9 +128,9 @@ add_executable(individualMarkers nodes/IndividualMarkers.cpp) target_link_libraries(individualMarkers ar_track_alvar kinect_filtering) ament_target_dependencies(individualMarkers ${dependencies}) -# add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) -# target_link_libraries(individualMarkersNoKinect ar_track_alvar) -# ament_target_dependencies(individualMarkersNoKinect ${dependencies}) +add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) +target_link_libraries(individualMarkersNoKinect ar_track_alvar) +ament_target_dependencies(individualMarkersNoKinect ${dependencies}) # add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) # target_link_libraries(trainMarkerBundle ar_track_alvar) diff --git a/ar_track_alvar/nodes/IndividualMarkers.cpp b/ar_track_alvar/nodes/IndividualMarkers.cpp index 9a16579..5869183 100644 --- a/ar_track_alvar/nodes/IndividualMarkers.cpp +++ b/ar_track_alvar/nodes/IndividualMarkers.cpp @@ -610,58 +610,11 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra } }; - - - - - - - - - - - - - - - - int main(int argc, char *argv[]) { rclcpp::init(argc, argv); rclcpp::spin(std::make_shared(argc,argv)); rclcpp::shutdown(); return 0; - // rclcpp::Node n, pn("~"); - - // // Run at the configured rate, discarding pointcloud msgs if necessary - // rclcpp::Rate rate(max_frequency); - - // while (rclcpp::ok()) - // { - // rclcpp::spin_some(node); - // rate.sleep(); - - // if (std::abs((rate.expectedCycleTime() - rclcpp::Duration(1.0 / max_frequency)).toSec()) > 0.001) - // { - // // Change rate dynamically; if must be above 0, as 0 will provoke a segfault on next spinOnce - // RCLCPP_DEBUG(rclcpp::get_logger("ArTrackAlvar"), "Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); - // rate = rclcpp::Rate(max_frequency); - // } - - // if (enableSwitched == true) - // { - // // Enable/disable switch: subscribe/unsubscribe to make use of pointcloud processing nodelet - // // lazy publishing policy; in CPU-scarce computer as TurtleBot's laptop this is a huge saving - // if (enabled == false) - // cloud_sub_.shutdown(); - // else - // cloud_sub_ = n.subscribe(cam_image_topic, 1, &getPointCloudCallback); - - // enableSwitched = false; - // } - // } - - // return 0; } diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index 7d9627e..d189bd3 100644 --- 
a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -43,108 +43,244 @@ #include #include #include "tf2_ros/buffer.h" +#include "tf2_ros/create_timer_ros.h" #include "tf2_ros/transform_listener.h" #include +#include #include -// #include -// #include using namespace alvar; using namespace std; -bool init=true; -Camera *cam; -cv_bridge::CvImagePtr cv_ptr_; -image_transport::Subscriber cam_sub_; -ros::Publisher arMarkerPub_; -ros::Publisher rvizMarkerPub_; -ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -visualization_msgs::msg::Marker rvizMarker_; -tf2_ros::TransformListener *tf_listener; -tf::TransformBroadcaster *tf_broadcaster; -MarkerDetector marker_detector; - -bool enableSwitched = false; -bool enabled = true; -double max_frequency; -double marker_size; -double max_new_marker_error; -double max_track_error; -std::string cam_image_topic; -std::string cam_info_topic; -std::string output_frame; -int marker_resolution = 5; // default marker resolution -int marker_margin = 2; // default marker margin - -void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg); - - -void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg) +class IndividualMarkersNoKinect : public rclcpp::Node { - //If we've already gotten the cam info, then go ahead - if(cam->getCamInfo_){ - try{ - tf2::StampedTransform CamToOutput; - try{ - tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); - tf_listener->lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, CamToOutput); + + + private: + bool init=true; + Camera *cam; + cv_bridge::CvImagePtr cv_ptr_; + + ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; + visualization_msgs::msg::Marker rvizMarker_; + + + rclcpp::Subscription::SharedPtr enable_sub_; + rclcpp::Subscription::SharedPtr cam_sub_; + rclcpp::Publisher::SharedPtr arMarkerPub_; + rclcpp::Publisher::SharedPtr rvizMarkerPub_; + + tf2_ros::TransformBroadcaster tf_broadcaster_; + tf2_ros::TransformListener tf_listener_; + tf2_ros::Buffer tf2_; + MarkerDetector marker_detector; + + bool enableSwitched = false; + bool enabled = true; + double max_frequency; + double marker_size; + double max_new_marker_error; + double max_track_error; + std::string cam_image_topic; + std::string cam_info_topic; + //image_transport::ImageTransport it_ + std::string output_frame; + int marker_resolution = 5; // default marker resolution + int marker_margin = 2; // default marker margin + + + public: + IndividualMarkersNoKinect(int argc, char* argv[]) : Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this)//, it_(this) + { + if(argc > 1) + { + RCLCPP_WARN(this->get_logger(), "Command line arguments are deprecated. Consider using ROS parameters and remappings."); + + if(argc < 7) + { + std::cout << std::endl; + cout << "Not enough arguments provided." 
<< endl; + cout << "Usage: ./individualMarkersNoKinect " + << " [ ]"; + std::cout << std::endl; + exit(0); + } + + // Get params from command line + marker_size = atof(argv[1]); + max_new_marker_error = atof(argv[2]); + max_track_error = atof(argv[3]); + cam_image_topic = argv[4]; + cam_info_topic = argv[5]; + output_frame = argv[6]; + + if (argc > 7) + { + max_frequency = atof(argv[7]); + this->declare_parameter("max_frequency", max_frequency); + } + if (argc > 8) + { + marker_resolution = atoi(argv[8]); + } + + if (argc > 9) + { + marker_margin = atoi(argv[9]); + } + + } + + else + { + // Get params from ros param server. + this->declare_parameter("marker_size", 10.0); + this->declare_parameter("max_new_marker_error", 0.08); + this->declare_parameter("max_track_error", 0.2); + this->declare_parameter("max_frequency", 8.0); + this->declare_parameter("marker_resolution", 5); + this->declare_parameter("marker_margin", 2); + + + this->get_parameter("marker_size", marker_size); + this->get_parameter("max_new_marker_error", max_new_marker_error); + this->get_parameter("max_track_error", max_track_error); + this->get_parameter("max_frequency", max_frequency); + this->get_parameter("marker_resolution", marker_resolution); + this->get_parameter("marker_margin", marker_margin); + + + + + if (!this->get_parameter("output_frame", output_frame)) + { + RCLCPP_ERROR(this->get_logger(), "Param 'output_frame' has to be set."); + exit(0); + } + + // Camera input topics. Use remapping to map to your camera topics. + cam_image_topic = "camera_image"; + cam_info_topic = "camera_info"; + } + + marker_detector.SetMarkerSize(marker_size, marker_resolution, marker_margin); + cam = new Camera(cam_info_topic); + + //Give tf a chance to catch up before the camera callback starts asking for transforms + // It will also reconfigure parameters for the first time, setting the default values + //TODO: come back to this, there's probably a better way to do this + rclcpp::Rate loop_rate(100); + loop_rate.sleep(); + + + cam_sub_ = this->create_subscription(cam_image_topic, 1, + std::bind(&IndividualMarkersNoKinect::getCapCallback, this, std::placeholders::_1)); + + + enable_sub_ = this->create_subscription("enable_detection", 1, std::bind(&IndividualMarkersNoKinect::enableCallback, this, std::placeholders::_1)); + + + } + + void enableCallback(const std_msgs::msg::Bool::SharedPtr msg) + { + enableSwitched = enabled != msg->data; + enabled = msg->data; + } + + + void getCapCallback (const sensor_msgs::msg::Image::SharedPtr image_msg) + { + //If we've already gotten the cam info, then go ahead + if(cam->getCamInfo_){ + try + { + geometry_msgs::msg::TransformStamped CamToOutput; + try + { + tf2_.canTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); + CamToOutput = tf2_.lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); } - catch (tf::TransformException ex){ - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); + catch (tf2::TransformException ex){ + RCLCPP_ERROR(this->get_logger(), "%s",ex.what()); } - //Convert the image - cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); + //Convert the image + cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); - //Get the estimated pose of the main markers by using all the markers in each bundle + //Get the estimated pose of the main markers by using all the markers in each bundle - // GetMultiMarkersPoses expects an 
IplImage*, but as of ros groovy, cv_bridge gives - // us a cv::Mat. I'm too lazy to change to cv::Mat throughout right now, so I - // do this conversion here -jbinney - IplImage ipl_image = cv_ptr_->image; + // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives + // us a cv::Mat. I'm too lazy to change to cv::Mat throughout right now, so I + // do this conversion here -jbinney - marker_detector.Detect(&ipl_image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true); - arPoseMarkers_.markers.clear (); - for (size_t i=0; isize(); i++) - { - //Get the pose relative to the camera + cv::Mat ipl_image = cv_ptr_->image; + marker_detector.Detect(&ipl_image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true); + arPoseMarkers_.markers.clear (); + for (size_t i=0; isize(); i++) + { + //Get the pose relative to the camera int id = (*(marker_detector.markers))[i].GetId(); - Pose p = (*(marker_detector.markers))[i].pose; - double px = p.translation[0]/100.0; - double py = p.translation[1]/100.0; - double pz = p.translation[2]/100.0; - double qx = p.quaternion[1]; - double qy = p.quaternion[2]; - double qz = p.quaternion[3]; - double qw = p.quaternion[0]; - - tf::Quaternion rotation (qx,qy,qz,qw); - tf::Vector3 origin (px,py,pz); - tf::Transform t (rotation, origin); - tf::Vector3 markerOrigin (0, 0, 0); - tf::Transform m (tf::Quaternion::getIdentity (), markerOrigin); - tf::Transform markerPose = t * m; // marker pose in the camera frame - - tf::Vector3 z_axis_cam = tf::Transform(rotation, tf::Vector3(0,0,0)) * tf::Vector3(0, 0, 1); -// RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "%02i Z in cam frame: %f %f %f",id, z_axis_cam.x(), z_axis_cam.y(), z_axis_cam.z()); - /// as we can't see through markers, this one is false positive detection - if (z_axis_cam.z() > 0) - { - continue; - } - - //Publish the transform from the camera to the marker - std::string markerFrame = "ar_marker_"; - std::stringstream out; - out << id; - std::string id_string = out.str(); - markerFrame += id_string; - tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); - tf_broadcaster->sendTransform(camToMarker); - + Pose p = (*(marker_detector.markers))[i].pose; + double px = p.translation[0]/100.0; + double py = p.translation[1]/100.0; + double pz = p.translation[2]/100.0; + double qx = p.quaternion[1]; + double qy = p.quaternion[2]; + double qz = p.quaternion[3]; + double qw = p.quaternion[0]; + + tf2::Quaternion rotation (qx,qy,qz,qw); + tf2::Vector3 origin (px,py,pz); + tf2::Transform t (rotation, origin); + tf2::Vector3 markerOrigin (0, 0, 0); + tf2::Transform m (tf2::Quaternion::getIdentity (), markerOrigin); + tf2::Transform markerPose = t * m; // marker pose in the camera frame + + + tf2::Vector3 z_axis_cam = tf2::Transform(rotation, tf2::Vector3(0,0,0)) * tf2::Vector3(0, 0, 1); + + /// as we can't see through markers, this one is false positive detection + if (z_axis_cam.z() > 0) + { + continue; + } + + //Publish the transform from the camera to the marker + std::string markerFrame = "ar_marker_"; + std::stringstream out; + out << id; + std::string id_string = out.str(); + markerFrame += id_string; + + geometry_msgs::msg::TransformStamped camToMarker; + camToMarker.header.stamp = image_msg->header.stamp; + camToMarker.header.frame_id = image_msg->header.frame_id; + camToMarker.child_frame_id = markerFrame; + + geometry_msgs::msg::Vector3 trans; + trans.x = px; + trans.y = py; + trans.z = pz; + 
geometry_msgs::msg::Quaternion rot; + rot.x = qx; + rot.y = qy; + rot.z = qz; + rot.w = qw; + + camToMarker.transform.translation = trans; + camToMarker.transform.rotation = rot; + tf_broadcaster_.sendTransform(camToMarker); + //Create the rviz visualization messages - tf::poseTFToMsg (markerPose, rvizMarker_.pose); + rvizMarker_.pose.position.x = markerPose.getOrigin().getX(); + rvizMarker_.pose.position.y = markerPose.getOrigin().getY(); + rvizMarker_.pose.position.z = markerPose.getOrigin().getZ(); + rvizMarker_.pose.orientation.x = markerPose.getRotation().getX(); + rvizMarker_.pose.orientation.y = markerPose.getRotation().getY(); + rvizMarker_.pose.orientation.z = markerPose.getRotation().getZ(); + rvizMarker_.pose.orientation.w = markerPose.getRotation().getW(); rvizMarker_.header.frame_id = image_msg->header.frame_id; rvizMarker_.header.stamp = image_msg->header.stamp; rvizMarker_.id = id; @@ -195,158 +331,67 @@ void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg) break; } rvizMarker_.lifetime = rclcpp::Duration (1.0); - rvizMarkerPub_.publish (rvizMarker_); + rvizMarkerPub_->publish (rvizMarker_); + //Create the pose marker messages + ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; //Get the pose of the tag in the camera frame, then the output frame (usually torso) - tf::Transform tagPoseOutput = CamToOutput * markerPose; - - //Create the pose marker messages - ar_track_alvar_msgs::AlvarMarker ar_pose_marker; - tf::poseTFToMsg (tagPoseOutput, ar_pose_marker.pose.pose); - ar_pose_marker.header.frame_id = output_frame; - ar_pose_marker.header.stamp = image_msg->header.stamp; - ar_pose_marker.id = id; - arPoseMarkers_.markers.push_back (ar_pose_marker); + tf2::doTransform(ar_pose_marker.pose, ar_pose_marker.pose,CamToOutput); + + + ar_pose_marker.header.frame_id = output_frame; + ar_pose_marker.header.stamp = image_msg->header.stamp; + ar_pose_marker.id = id; + arPoseMarkers_.markers.push_back (ar_pose_marker); } - arMarkerPub_.publish (arPoseMarkers_); + arMarkerPub_->publish (arPoseMarkers_); } catch (cv_bridge::Exception& e){ - ROS_ERROR ("Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ()); + RCLCPP_ERROR(this->get_logger(),"Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ()); } - } -} - -void configCallback(ar_track_alvar::ParamsConfig &config, uint32_t level) -{ - RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), "AR tracker reconfigured: %s %.2f %.2f %.2f %.2f", config.enabled ? "ENABLED" : "DISABLED", - config.max_frequency, config.marker_size, config.max_new_marker_error, config.max_track_error); - - enableSwitched = enabled != config.enabled; - - enabled = config.enabled; - max_frequency = config.max_frequency; - marker_size = config.marker_size; - max_new_marker_error = config.max_new_marker_error; - max_track_error = config.max_track_error; -} + } + } -void enableCallback(const std_msgs::msg::Bool::ConstSharedPtr& msg) -{ - enableSwitched = enabled != msg->data; - enabled = msg->data; -} +}; int main(int argc, char *argv[]) { - ros::init (argc, argv, "marker_detect"); - rclcpp::Node n, pn("~"); - - if(argc > 1) { - RCLCPP_WARN(rclcpp::get_logger("ArTrackAlvar"), "Command line arguments are deprecated. Consider using ROS parameters and remappings."); - - if(argc < 7){ - std::cout << std::endl; - cout << "Not enough arguments provided." 
<< endl; - cout << "Usage: ./individualMarkersNoKinect " - << " [ ]"; - std::cout << std::endl; - return 0; - } - // Get params from command line - marker_size = atof(argv[1]); - max_new_marker_error = atof(argv[2]); - max_track_error = atof(argv[3]); - cam_image_topic = argv[4]; - cam_info_topic = argv[5]; - output_frame = argv[6]; - - if (argc > 7) { - max_frequency = atof(argv[7]); - pn.setParam("max_frequency", max_frequency); - } - if (argc > 8) - marker_resolution = atoi(argv[8]); - if (argc > 9) - marker_margin = atoi(argv[9]); - - } else { - // Get params from ros param server. - pn.param("marker_size", marker_size, 10.0); - pn.param("max_new_marker_error", max_new_marker_error, 0.08); - pn.param("max_track_error", max_track_error, 0.2); - pn.param("max_frequency", max_frequency, 8.0); - pn.setParam("max_frequency", max_frequency); // in case it was not set. - pn.param("marker_resolution", marker_resolution, 5); - pn.param("marker_margin", marker_margin, 2); - if (!pn.getParam("output_frame", output_frame)) { - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "Param 'output_frame' has to be set."); - exit(EXIT_FAILURE); - } - - // Camera input topics. Use remapping to map to your camera topics. - cam_image_topic = "camera_image"; - cam_info_topic = "camera_info"; - } - - // Set dynamically configurable parameters so they don't get replaced by default values - pn.setParam("marker_size", marker_size); - pn.setParam("max_new_marker_error", max_new_marker_error); - pn.setParam("max_track_error", max_track_error); - - marker_detector.SetMarkerSize(marker_size, marker_resolution, marker_margin); - - cam = new Camera(n, cam_info_topic); - tf_listener = new tf2_ros::TransformListener(n); - tf_broadcaster = new tf::TransformBroadcaster(); - arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::msg::Marker > ("visualization_marker", 0); - - // Prepare dynamic reconfiguration - dynamic_reconfigure::Server < ar_track_alvar::ParamsConfig > server; - dynamic_reconfigure::Server::CallbackType f; - - f = boost::bind(&configCallback, _1, _2); - server.setCallback(f); - - //Give tf a chance to catch up before the camera callback starts asking for transforms - // It will also reconfigure parameters for the first time, setting the default values - rclcpp::Duration(1.0).sleep(); - rclcpp::spin_some(node); - - image_transport::ImageTransport it_(n); - - // Run at the configured rate, discarding pointcloud msgs if necessary - rclcpp::Rate rate(max_frequency); - - /// Subscriber for enable-topic so that a user can turn off the detection if it is not used without - /// having to use the reconfigure where he has to know all parameters - auto enable_sub_ = pn.subscribe("enable_detection", 1, &enableCallback); - - enableSwitched = true; - while (rclcpp::ok()) - { - rclcpp::spin_some(node); - rate.sleep(); - - if (std::abs((rate.expectedCycleTime() - rclcpp::Duration(1.0 / max_frequency)).toSec()) > 0.001) - { - // Change rate dynamically; if must be above 0, as 0 will provoke a segfault on next spinOnce - RCLCPP_DEBUG(rclcpp::get_logger("ArTrackAlvar"), "Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); - rate = rclcpp::Rate(max_frequency); - } - - if (enableSwitched) - { - // Enable/disable switch: subscribe/unsubscribe to make use of pointcloud processing nodelet - // lazy publishing policy; in CPU-scarce computer as TurtleBot's laptop this is a huge saving - if (enabled) - 
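// The enable/disable subscribe-shutdown loop removed here (and kept as a TODO
// comment in the ported file) maps onto resetting the subscription shared
// pointer in ROS 2; a sketch, written as a hypothetical member function of the
// node class:
void handleEnable(bool enable)
{
  if (enable && !cam_sub_)
  {
    cam_sub_ = this->create_subscription<sensor_msgs::msg::Image>(
        cam_image_topic, 1,
        std::bind(&IndividualMarkersNoKinect::getCapCallback, this,
                  std::placeholders::_1));
  }
  else if (!enable)
  {
    cam_sub_.reset();  // ROS 2 equivalent of subscriber.shutdown()
  }
}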
cam_sub_ = it_.subscribe(cam_image_topic, 1, &getCapCallback); - else - cam_sub_.shutdown(); - enableSwitched = false; - } - } + rclcpp::init(argc, argv); + rclcpp::spin(std::make_shared(argc,argv)); + rclcpp::shutdown(); + return 0; + + // TODO Figure out how to do this + // image_transport::ImageTransport it_(n); + + // // Run at the configured rate, discarding pointcloud msgs if necessary + // rclcpp::Rate rate(max_frequency); + + + // enableSwitched = true; + // while (rclcpp::ok()) + // { + // rclcpp::spin_some(node); + // rate.sleep(); + + // if (std::abs((rate.expectedCycleTime() - rclcpp::Duration(1.0 / max_frequency)).toSec()) > 0.001) + // { + // // Change rate dynamically; if must be above 0, as 0 will provoke a segfault on next spinOnce + // RCLCPP_DEBUG(rclcpp::get_logger("ArTrackAlvar"), "Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); + // rate = rclcpp::Rate(max_frequency); + // } + + // if (enableSwitched) + // { + // // Enable/disable switch: subscribe/unsubscribe to make use of pointcloud processing nodelet + // // lazy publishing policy; in CPU-scarce computer as TurtleBot's laptop this is a huge saving + // if (enabled) + // cam_sub_ = it_.subscribe(cam_image_topic, 1, &getCapCallback); + // else + // cam_sub_.shutdown(); + // enableSwitched = false; + // } + // } - return 0; } From fc21070692c7b4df7b1f8816b7d6209ad4cd1597 Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Mon, 26 Jul 2021 15:13:15 -0500 Subject: [PATCH 08/27] port of three nodes --- ar_track_alvar/CMakeLists.txt | 12 +- ar_track_alvar/nodes/FindMarkerBundles.cpp | 1427 +++++++++-------- .../nodes/IndividualMarkersNoKinect.cpp | 2 + ar_track_alvar/nodes/TrainMarkerBundle.cpp | 726 +++++---- 4 files changed, 1128 insertions(+), 1039 deletions(-) diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 75dc6a1..ca84834 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -132,14 +132,14 @@ add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) target_link_libraries(individualMarkersNoKinect ar_track_alvar) ament_target_dependencies(individualMarkersNoKinect ${dependencies}) -# add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) -# target_link_libraries(trainMarkerBundle ar_track_alvar) -# ament_target_dependencies(trainMarkerBundle ${dependencies}) +add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) +target_link_libraries(trainMarkerBundle ar_track_alvar) +ament_target_dependencies(trainMarkerBundle ${dependencies}) -# add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) -# target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) -# ament_target_dependencies(findMarkerBundles ${dependencies}) +add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) +target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) +ament_target_dependencies(findMarkerBundles ${dependencies}) # add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) # target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) diff --git a/ar_track_alvar/nodes/FindMarkerBundles.cpp b/ar_track_alvar/nodes/FindMarkerBundles.cpp index 3e701af..91222c9 100644 --- a/ar_track_alvar/nodes/FindMarkerBundles.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundles.cpp @@ -67,6 +67,9 @@ #include #include #include +#include +#include "tf2_ros/create_timer_ros.h" +#include #define MAIN_MARKER 1 @@ -83,771 +86,803 @@ using 
namespace alvar; using namespace std; using boost::make_shared; -Camera *cam; -cv_bridge::CvImagePtr cv_ptr_; -image_transport::Subscriber cam_sub_; - -rclcpp::Publisher::SharedPtr arMarkerPub_; -rclcpp::Publisher::SharedPtr rvizMarkerPub_; -rclcpp::Publisher::SharedPtr rvizMarkerPub2_; - - -rclcpp::Subscription::SharedPtr cloud_sub_; - -ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; -tf2_ros::TransformListener *tf_listener; -tf::TransformBroadcaster *tf_broadcaster; -MarkerDetector marker_detector; -MultiMarkerBundle **multi_marker_bundles=NULL; - -Pose *bundlePoses; -int *master_id; -int *bundles_seen; -bool *master_visible; -std::vector *bundle_indices; -bool init = true; -ata::MedianFilter **med_filts; -int med_filt_size; - -double marker_size; -double max_new_marker_error; -double max_track_error; -std::string cam_image_topic; -std::string cam_info_topic; -std::string output_frame; -int n_bundles = 0; - -//Debugging utility function -void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double rad) +class FindMarkerBundles : public rclcpp::Node { - visualization_msgs::msg::Marker rvizMarker; - - rvizMarker.header.frame_id = frame; - rvizMarker.header.stamp = rclcpp::Time::now(); - rvizMarker.id = id; - rvizMarker.ns = "3dpts"; - - rvizMarker.scale.x = rad; - rvizMarker.scale.y = rad; - rvizMarker.scale.z = rad; - - rvizMarker.type = visualization_msgs::msg::Marker::SPHERE_LIST; - rvizMarker.action = visualization_msgs::msg::Marker::ADD; - - if(color==1){ - rvizMarker.color.r = 0.0f; - rvizMarker.color.g = 1.0f; - rvizMarker.color.b = 1.0f; - rvizMarker.color.a = 1.0; - } - if(color==2){ - rvizMarker.color.r = 1.0f; - rvizMarker.color.g = 0.0f; - rvizMarker.color.b = 1.0f; - rvizMarker.color.a = 1.0; - } - if(color==3){ - rvizMarker.color.r = 1.0f; - rvizMarker.color.g = 1.0f; - rvizMarker.color.b = 0.0f; - rvizMarker.color.a = 1.0; - } - - gm::Point p; - for(int i=0; ipoints.size(); i++){ - p.x = cloud->points[i].x; - p.y = cloud->points[i].y; - p.z = cloud->points[i].z; - rvizMarker.points.push_back(p); - } - - rvizMarker.lifetime = rclcpp::Duration (1.0); - rvizMarkerPub2_.publish (rvizMarker); -} - + private: + Camera *cam; + cv_bridge::CvImagePtr cv_ptr_; + image_transport::Subscriber cam_sub_; + + // Publishers + rclcpp::Publisher::SharedPtr arMarkerPub_; + rclcpp::Publisher::SharedPtr rvizMarkerPub_; + rclcpp::Publisher::SharedPtr rvizMarkerPub2_; + + // Subscribers + rclcpp::Subscription::SharedPtr cloud_sub_; + + + tf2_ros::TransformBroadcaster tf_broadcaster_; + tf2_ros::TransformListener tf_listener_; + tf2_ros::Buffer tf2_; + + MarkerDetector marker_detector; + MultiMarkerBundle **multi_marker_bundles=NULL; + ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; + + Pose *bundlePoses; + int *master_id; + int *bundles_seen; + bool *master_visible; + std::vector *bundle_indices; + bool init = true; + ata::MedianFilter **med_filts; + int med_filt_size; + + double marker_size; + double max_new_marker_error; + double max_track_error; + std::string cam_image_topic; + std::string cam_info_topic; + std::string output_frame; + int n_bundles = 0; + + public: -void drawArrow(gm::Point start, tf::Matrix3x3 mat, string frame, int color, int id) -{ - visualization_msgs::msg::Marker rvizMarker; - - rvizMarker.header.frame_id = frame; - rvizMarker.header.stamp = rclcpp::Time::now(); - rvizMarker.id = id; - rvizMarker.ns = "arrow"; - - rvizMarker.scale.x = 0.01; - rvizMarker.scale.y = 0.01; - rvizMarker.scale.z = 0.1; - - rvizMarker.type = 
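// A note on the create_publisher calls above: the ROS 1 queue_size of 0 meant
// "infinite", but in rclcpp the second argument is a KEEP_LAST history depth,
// for which 0 is not a useful value. A small positive depth is the usual
// translation:
arMarkerPub_ = this->create_publisher<ar_track_alvar_msgs::msg::AlvarMarkers>(
    "ar_pose_marker", rclcpp::QoS(10));
rvizMarkerPub_ = this->create_publisher<visualization_msgs::msg::Marker>(
    "visualization_marker", rclcpp::QoS(10));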
visualization_msgs::msg::Marker::ARROW; - rvizMarker.action = visualization_msgs::msg::Marker::ADD; - - for(int i=0; i<3; i++){ - rvizMarker.points.clear(); - rvizMarker.points.push_back(start); - gm::Point end; - end.x = start.x + mat[0][i]; - end.y = start.y + mat[1][i]; - end.z = start.z + mat[2][i]; - rvizMarker.points.push_back(end); - rvizMarker.id += 10*i; - rvizMarker.lifetime = rclcpp::Duration (1.0); - - if(color==1){ - rvizMarker.color.r = 1.0f; - rvizMarker.color.g = 0.0f; - rvizMarker.color.b = 0.0f; - rvizMarker.color.a = 1.0; - } - if(color==2){ - rvizMarker.color.r = 0.0f; - rvizMarker.color.g = 1.0f; - rvizMarker.color.b = 0.0f; - rvizMarker.color.a = 1.0; - } - if(color==3){ - rvizMarker.color.r = 0.0f; - rvizMarker.color.g = 0.0f; - rvizMarker.color.b = 1.0f; - rvizMarker.color.a = 1.0; - } - color += 1; + FindMarkerBundles(int argc, char* argv[]):Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this) + { + if(argc < 9) + { + std::cout << std::endl; + cout << "Not enough arguments provided." << endl; + cout << "Usage: ./findMarkerBundles " << endl; + std::cout << std::endl; + exit(0); + } - rvizMarkerPub2_.publish (rvizMarker); - } -} + // Get params from command line + marker_size = atof(argv[1]); + max_new_marker_error = atof(argv[2]); + max_track_error = atof(argv[3]); + cam_image_topic = argv[4]; + cam_info_topic = argv[5]; + output_frame = argv[6]; + med_filt_size = atoi(argv[7]); + int n_args_before_list = 8; + n_bundles = argc - n_args_before_list; + + + marker_detector.SetMarkerSize(marker_size); + multi_marker_bundles = new MultiMarkerBundle*[n_bundles]; + bundlePoses = new Pose[n_bundles]; + master_id = new int[n_bundles]; + bundle_indices = new std::vector[n_bundles]; + bundles_seen = new int[n_bundles]; + master_visible = new bool[n_bundles]; + + + + // Set up camera, listeners, and broadcasters + cam = new Camera(cam_info_topic); + arMarkerPub_ = this->create_publisher ("ar_pose_marker", 0); + rvizMarkerPub_ = this->create_publisher ("visualization_marker", 0); + rvizMarkerPub2_ = this->create_publisher ("ARmarker_points", 0); + + //Create median filters + med_filts = new ata::MedianFilter*[n_bundles]; + for(int i=0; i id_vector = loadHelper.getIndices(); + multi_marker_bundles[i] = new MultiMarkerBundle(id_vector); + multi_marker_bundles[i]->Load(argv[i + n_args_before_list], FILE_FORMAT_XML); + master_id[i] = multi_marker_bundles[i]->getMasterId(); + bundle_indices[i] = multi_marker_bundles[i]->getIndices(); + calcAndSaveMasterCoords(*(multi_marker_bundles[i])); + } + else{ + cout<<"Cannot load file "<< argv[i + n_args_before_list] << endl; + exit(0); + } + } + //Give tf a chance to catch up before the camera callback starts asking for transforms + //TODO: come back to this, there's probably a better way to do this + rclcpp::Rate loop_rate(100); + loop_rate.sleep(); + + //Subscribe to topics and set up callbacks + RCLCPP_INFO(this->get_logger(),"Subscribing to image topic"); + -// Infer the master tag corner positons from the other observed tags -// Also does some of the bookkeeping for tracking that MultiMarker::_GetPose does -int InferCorners(const ARCloud &cloud, MultiMarkerBundle &master, ARCloud &bund_corners){ - bund_corners.clear(); - bund_corners.resize(4); - for(int i=0; i<4; i++){ - bund_corners[i].x = 0; - bund_corners[i].y = 0; - bund_corners[i].z = 0; - } + + cloud_sub_ = this->create_subscription(cam_image_topic, 1, std::bind(&FindMarkerBundles::getPointCloudCallback, this, std::placeholders::_1)); - // 
Reset the marker_status to 1 for all markers in point_cloud for tracking purposes - for (size_t i=0; i 0) master.marker_status[i]=1; - } - int n_est = 0; + } - // For every detected marker - for (size_t i=0; isize(); i++) + //Debugging utility function + void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double rad) { - const Marker* marker = &((*marker_detector.markers)[i]); - int id = marker->GetId(); - int index = master.get_id_index(id); - int mast_id = master.master_id; - if (index < 0) continue; + visualization_msgs::msg::Marker rvizMarker; - // But only if we have corresponding points in the pointcloud - if (master.marker_status[index] > 0 && marker->valid) { - n_est++; + rvizMarker.header.frame_id = frame; + rvizMarker.header.stamp = this->get_clock()->now(); + rvizMarker.id = id; + rvizMarker.ns = "3dpts"; + + rvizMarker.scale.x = rad; + rvizMarker.scale.y = rad; + rvizMarker.scale.z = rad; + + rvizMarker.type = visualization_msgs::msg::Marker::SPHERE_LIST; + rvizMarker.action = visualization_msgs::msg::Marker::ADD; + + if(color==1){ + rvizMarker.color.r = 0.0f; + rvizMarker.color.g = 1.0f; + rvizMarker.color.b = 1.0f; + rvizMarker.color.a = 1.0; + } + if(color==2){ + rvizMarker.color.r = 1.0f; + rvizMarker.color.g = 0.0f; + rvizMarker.color.b = 1.0f; + rvizMarker.color.a = 1.0; + } + if(color==3){ + rvizMarker.color.r = 1.0f; + rvizMarker.color.g = 1.0f; + rvizMarker.color.b = 0.0f; + rvizMarker.color.a = 1.0; + } + + gm::msg::Point p; + for(int i=0; ipoints.size(); i++){ + p.x = cloud->points[i].x; + p.y = cloud->points[i].y; + p.z = cloud->points[i].z; + rvizMarker.points.push_back(p); + } + + rvizMarker.lifetime = rclcpp::Duration (1.0); + rvizMarkerPub2_->publish (rvizMarker); + } - std::string marker_frame = "ar_marker_"; - std::stringstream mark_out; - mark_out << id; - std::string id_string = mark_out.str(); - marker_frame += id_string; - //Grab the precomputed corner coords and correct for the weird Alvar coord system - for(int j = 0; j < 4; ++j) - { - tf::Vector3 corner_coord = master.rel_corners[index][j]; - gm::PointStamped p, output_p; - p.header.frame_id = marker_frame; - p.point.x = corner_coord.y()/100.0; - p.point.y = -corner_coord.x()/100.0; - p.point.z = corner_coord.z()/100.0; - - try{ - tf_listener->waitForTransform(cloud.header.frame_id, marker_frame, rclcpp::Time(0), rclcpp::Duration(0.1)); - tf_listener->transformPoint(cloud.header.frame_id, p, output_p); - } - catch (tf::TransformException ex){ - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "ERROR InferCorners: %s",ex.what()); - return -1; - } - - bund_corners[j].x += output_p.point.x; - bund_corners[j].y += output_p.point.y; - bund_corners[j].z += output_p.point.z; + void drawArrow(gm::msg::Point start, tf2::Matrix3x3 mat, string frame, int color, int id) + { + visualization_msgs::msg::Marker rvizMarker; + + rvizMarker.header.frame_id = frame; + rvizMarker.header.stamp = this->get_clock()->now(); + rvizMarker.id = id; + rvizMarker.ns = "arrow"; + + rvizMarker.scale.x = 0.01; + rvizMarker.scale.y = 0.01; + rvizMarker.scale.z = 0.1; + + rvizMarker.type = visualization_msgs::msg::Marker::ARROW; + rvizMarker.action = visualization_msgs::msg::Marker::ADD; + + for(int i=0; i<3; i++){ + rvizMarker.points.clear(); + rvizMarker.points.push_back(start); + gm::msg::Point end; + end.x = start.x + mat[0][i]; + end.y = start.y + mat[1][i]; + end.z = start.z + mat[2][i]; + rvizMarker.points.push_back(end); + rvizMarker.id += 10*i; + rvizMarker.lifetime = rclcpp::Duration (1.0); + + if(color==1){ 
+ rvizMarker.color.r = 1.0f; + rvizMarker.color.g = 0.0f; + rvizMarker.color.b = 0.0f; + rvizMarker.color.a = 1.0; + } + if(color==2){ + rvizMarker.color.r = 0.0f; + rvizMarker.color.g = 1.0f; + rvizMarker.color.b = 0.0f; + rvizMarker.color.a = 1.0; + } + if(color==3){ + rvizMarker.color.r = 0.0f; + rvizMarker.color.g = 0.0f; + rvizMarker.color.b = 1.0f; + rvizMarker.color.a = 1.0; } - master.marker_status[index] = 2; // Used for tracking + color += 1; + + rvizMarkerPub2_->publish (rvizMarker); } } - - //Divide to take the average of the summed estimates - if(n_est > 0){ + + + // Infer the master tag corner positons from the other observed tags + // Also does some of the bookkeeping for tracking that MultiMarker::_GetPose does + int InferCorners(const ARCloud &cloud, MultiMarkerBundle &master, ARCloud &bund_corners){ + bund_corners.clear(); + bund_corners.resize(4); for(int i=0; i<4; i++){ - bund_corners[i].x /= n_est; - bund_corners[i].y /= n_est; - bund_corners[i].z /= n_est; + bund_corners[i].x = 0; + bund_corners[i].y = 0; + bund_corners[i].z = 0; } - } - return 0; -} + // Reset the marker_status to 1 for all markers in point_cloud for tracking purposes + for (size_t i=0; i 0) master.marker_status[i]=1; + } + int n_est = 0; + + // For every detected marker + for (size_t i=0; isize(); i++) + { + const Marker* marker = &((*marker_detector.markers)[i]); + int id = marker->GetId(); + int index = master.get_id_index(id); + int mast_id = master.master_id; + if (index < 0) continue; + + // But only if we have corresponding points in the pointcloud + if (master.marker_status[index] > 0 && marker->valid) { + n_est++; + + std::string marker_frame = "ar_marker_"; + std::stringstream mark_out; + mark_out << id; + std::string id_string = mark_out.str(); + marker_frame += id_string; + + //Grab the precomputed corner coords and correct for the weird Alvar coord system + for(int j = 0; j < 4; ++j) + { + tf2::Vector3 corner_coord = master.rel_corners[index][j]; + gm::msg::PointStamped p, output_p; + p.header.frame_id = marker_frame; + p.point.x = corner_coord.y()/100.0; + p.point.y = -corner_coord.x()/100.0; + p.point.z = corner_coord.z()/100.0; + + try{ + + + tf2_.canTransform(cloud.header.frame_id, marker_frame, rclcpp::Time(0), rclcpp::Duration(0.1)); + geometry_msgs::msg::TransformStamped pt_transform; + pt_transform = tf2_.lookupTransform(cloud.header.frame_id, marker_frame, rclcpp::Time(0), rclcpp::Duration(0.1)); + tf2::doTransform(p, output_p,pt_transform); + } + catch (tf2::TransformException ex){ + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "ERROR InferCorners: %s",ex.what()); + return -1; + } + + bund_corners[j].x += output_p.point.x; + bund_corners[j].y += output_p.point.y; + bund_corners[j].z += output_p.point.z; + } + master.marker_status[index] = 2; // Used for tracking + } + } + + //Divide to take the average of the summed estimates + if(n_est > 0){ + for(int i=0; i<4; i++){ + bund_corners[i].x /= n_est; + bund_corners[i].y /= n_est; + bund_corners[i].z /= n_est; + } + } -int PlaneFitPoseImprovement(int id, const ARCloud &corners_3D, ARCloud::Ptr selected_points, const ARCloud &cloud, Pose &p){ + return 0; + } + + int PlaneFitPoseImprovement(int id, const ARCloud &corners_3D, ARCloud::Ptr selected_points, const ARCloud &cloud, Pose &p) + { - ata::PlaneFitResult res = ata::fitPlane(selected_points); - gm::PoseStamped pose; - pose.header.stamp = pcl_conversions::fromPCL(cloud.header).stamp; - pose.header.frame_id = cloud.header.frame_id; - pose.pose.position = 
ata::centroid(*res.inliers);
+      ata::PlaneFitResult res = ata::fitPlane(selected_points);
+      gm::msg::PoseStamped pose;
+      pose.header.stamp = pcl_conversions::fromPCL(cloud.header).stamp;
+      pose.header.frame_id = cloud.header.frame_id;
+      pose.pose.position = ata::centroid(*res.inliers);
 
-  draw3dPoints(selected_points, cloud.header.frame_id, 1, id, 0.005);
-
-  //Get 2 points that point forward in marker x direction
-  int i1,i2;
-  if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) ||
-     isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z))
-  {
-    if(isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z) ||
-       isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
-    {
-      return -1;
-    }
-    else{
-      i1 = 1;
-      i2 = 2;
-    }
-  }
-  else{
-    i1 = 0;
-    i2 = 3;
-  }
-
-  //Get 2 points the point forward in marker y direction
-  int i3,i4;
-  if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) ||
-     isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z))
-  {
-    if(isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z) ||
-       isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
-    {
-      return -1;
-    }
-    else{
-      i3 = 2;
-      i4 = 3;
-    }
-  }
-  else{
-    i3 = 1;
-    i4 = 0;
-  }
-
-  ARCloud::Ptr orient_points(new ARCloud());
-  orient_points->points.push_back(corners_3D[i1]);
-  draw3dPoints(orient_points, cloud.header.frame_id, 3, id+1000, 0.008);
-
-  orient_points->clear();
-  orient_points->points.push_back(corners_3D[i2]);
-  draw3dPoints(orient_points, cloud.header.frame_id, 2, id+2000, 0.008);
-
-  int succ;
-  succ = ata::extractOrientation(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], pose.pose.orientation);
-  if(succ < 0) return -1;
-
-  tf::Matrix3x3 mat;
-  succ = ata::extractFrame(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], mat);
-  if(succ < 0) return -1;
-
-  drawArrow(pose.pose.position, mat, cloud.header.frame_id, 1, id);
-
-  p.translation[0] = pose.pose.position.x * 100.0;
-  p.translation[1] = pose.pose.position.y * 100.0;
-  p.translation[2] = pose.pose.position.z * 100.0;
-  p.quaternion[1] = pose.pose.orientation.x;
-  p.quaternion[2] = pose.pose.orientation.y;
-  p.quaternion[3] = pose.pose.orientation.z;
-  p.quaternion[0] = pose.pose.orientation.w;
+      draw3dPoints(selected_points, cloud.header.frame_id, 1, id, 0.005);
+
+      //Get 2 points that point forward in marker x direction
+      int i1,i2;
+      if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) ||
+         isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z))
+      {
+        if(isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z) ||
+           isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
+        {
+          return -1;
+        }
+        else{
+          i1 = 1;
+          i2 = 2;
+        }
+      }
+      else{
+        i1 = 0;
+        i2 = 3;
+      }
+
+      //Get 2 points that point forward in marker y direction
+      int i3,i4;
+      if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) ||
+         isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z))
+      {
+        if(isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z) ||
+           isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
+        {
+          return -1;
+        }
+        else{
+          i3 = 2;
+          i4 = 3;
+        }
+      }
+      else{
+        i3 = 1;
+        i4 = 0;
+      }
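Two Alvar conventions in this function are easy to miss and recur throughout these
nodes: Alvar poses are stored in centimeters while ROS uses meters, and Alvar's
quaternion array is w-first while geometry_msgs orders x,y,z,w. A minimal sketch of
the conversion (the helper name is illustrative; it assumes only alvar::Pose's
public translation/quaternion arrays, which the assignments later in this function
also use):

    #include <ar_track_alvar/Pose.h>
    #include <geometry_msgs/msg/pose.hpp>

    // Illustrative helper: geometry_msgs pose -> alvar::Pose, mirroring the
    // unit and ordering conventions used in PlaneFitPoseImprovement().
    void rosPoseToAlvar(const geometry_msgs::msg::Pose& ros_pose, alvar::Pose& p)
    {
      p.translation[0] = ros_pose.position.x * 100.0;  // meters -> centimeters
      p.translation[1] = ros_pose.position.y * 100.0;
      p.translation[2] = ros_pose.position.z * 100.0;
      p.quaternion[0] = ros_pose.orientation.w;        // Alvar stores w first
      p.quaternion[1] = ros_pose.orientation.x;
      p.quaternion[2] = ros_pose.orientation.y;
      p.quaternion[3] = ros_pose.orientation.z;
    }

+
+      ARCloud::Ptr orient_points(new ARCloud());
+      orient_points->points.push_back(corners_3D[i1]);
+      draw3dPoints(orient_points, 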
cloud.header.frame_id, 3, id+1000, 0.008); + + orient_points->clear(); + orient_points->points.push_back(corners_3D[i2]); + draw3dPoints(orient_points, cloud.header.frame_id, 2, id+2000, 0.008); + + int succ; + succ = ata::extractOrientation(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], pose.pose.orientation); + if(succ < 0) return -1; - return 0; -} + tf2::Matrix3x3 mat; + succ = ata::extractFrame(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], mat); + if(succ < 0) return -1; + drawArrow(pose.pose.position, mat, cloud.header.frame_id, 1, id); -// Updates the bundlePoses of the multi_marker_bundles by detecting markers and -// using all markers in a bundle to infer the master tag's position -void GetMultiMarkerPoses(IplImage *image, ARCloud &cloud) { + p.translation[0] = pose.pose.position.x * 100.0; + p.translation[1] = pose.pose.position.y * 100.0; + p.translation[2] = pose.pose.position.z * 100.0; + p.quaternion[1] = pose.pose.orientation.x; + p.quaternion[2] = pose.pose.orientation.y; + p.quaternion[3] = pose.pose.orientation.z; + p.quaternion[0] = pose.pose.orientation.w; - for(int i=0; isize(); i++) - { - vector > pixels; - Marker *m = &((*marker_detector.markers)[i]); - int id = m->GetId(); - //cout << "******* ID: " << id << endl; + + + // Updates the bundlePoses of the multi_marker_bundles by detecting markers and + // using all markers in a bundle to infer the master tag's position + void GetMultiMarkerPoses(cv::Mat *image, ARCloud &cloud) { + + for(int i=0; isize(); i++) + { + vector > pixels; + Marker *m = &((*marker_detector.markers)[i]); + int id = m->GetId(); + + //Get the 3D inner corner points - more stable than outer corners that can "fall off" object + int resol = m->GetRes(); + int ori = m->ros_orientation; + + PointDouble pt1, pt2, pt3, pt4; + pt4 = m->ros_marker_points_img[0]; + pt3 = m->ros_marker_points_img[resol-1]; + pt1 = m->ros_marker_points_img[(resol*resol)-resol]; + pt2 = m->ros_marker_points_img[(resol*resol)-1]; - //Get the 3D points of the outer corners - /* - PointDouble corner0 = m->marker_corners_img[0]; - PointDouble corner1 = m->marker_corners_img[1]; - PointDouble corner2 = m->marker_corners_img[2]; - PointDouble corner3 = m->marker_corners_img[3]; - m->ros_corners_3D[0] = cloud(corner0.x, corner0.y); - m->ros_corners_3D[1] = cloud(corner1.x, corner1.y); - m->ros_corners_3D[2] = cloud(corner2.x, corner2.y); - m->ros_corners_3D[3] = cloud(corner3.x, corner3.y); - */ - - //Get the 3D inner corner points - more stable than outer corners that can "fall off" object - int resol = m->GetRes(); - int ori = m->ros_orientation; + m->ros_corners_3D[0] = cloud(pt1.x, pt1.y); + m->ros_corners_3D[1] = cloud(pt2.x, pt2.y); + m->ros_corners_3D[2] = cloud(pt3.x, pt3.y); + m->ros_corners_3D[3] = cloud(pt4.x, pt4.y); - PointDouble pt1, pt2, pt3, pt4; - pt4 = m->ros_marker_points_img[0]; - pt3 = m->ros_marker_points_img[resol-1]; - pt1 = m->ros_marker_points_img[(resol*resol)-resol]; - pt2 = m->ros_marker_points_img[(resol*resol)-1]; - - m->ros_corners_3D[0] = cloud(pt1.x, pt1.y); - m->ros_corners_3D[1] = cloud(pt2.x, pt2.y); - m->ros_corners_3D[2] = cloud(pt3.x, pt3.y); - m->ros_corners_3D[3] = cloud(pt4.x, pt4.y); - - if(ori >= 0 && ori < 4){ - if(ori != 0){ - std::rotate(m->ros_corners_3D.begin(), m->ros_corners_3D.begin() + ori, m->ros_corners_3D.end()); - } - } - else - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id); - - //Check if we have 
spotted a master tag - int master_ind = -1; - for(int j=0; j= 0 && ori < 4){ + if(ori != 0){ + std::rotate(m->ros_corners_3D.begin(), m->ros_corners_3D.begin() + ori, m->ros_corners_3D.end()); } } + else + RCLCPP_ERROR(this->get_logger(), "FindMarkerBundles: Bad Orientation: %i for ID: %i", ori, id); + + //Check if we have spotted a master tag + int master_ind = -1; + for(int j=0; jros_marker_points_img) - pixels.push_back(cv::Point(p.x, p.y)); - ARCloud::Ptr selected_points = ata::filterCloud(cloud, pixels); + //Mark the bundle that marker belongs to as "seen" + int bundle_ind = -1; + for(int j=0; jros_corners_3D, selected_points, cloud, m->pose); - - //If the plane fit fails... - if(ret < 0){ - //Mark this tag as invalid - m->valid = false; - //If this was a master tag, reset its visibility - if(master_ind >= 0) - master_visible[master_ind] = false; - //decrement the number of markers seen in this bundle - bundles_seen[bundle_ind] -= 1; - - } - else - m->valid = true; - } - - //For each master tag, infer the 3D position of its corners from other visible tags - //Then, do a plane fit to those new corners - ARCloud inferred_corners; - for(int i=0; i 0){ - //if(master_visible[i] == false){ - if(InferCorners(cloud, *(multi_marker_bundles[i]), inferred_corners) >= 0){ - ARCloud::Ptr inferred_cloud(new ARCloud(inferred_corners)); - PlaneFitPoseImprovement(i+5000, inferred_corners, inferred_cloud, cloud, bundlePoses[i]); - } - //} - //If master is visible, use it directly instead of inferring pose - //else{ - // for (size_t j=0; jsize(); j++){ - // Marker *m = &((*marker_detector.markers)[j]); - // if(m->GetId() == master_id[i]) - // bundlePoses[i] = m->pose; - // } - //} - Pose ret_pose; - if(med_filt_size > 0){ - med_filts[i]->addPose(bundlePoses[i]); - med_filts[i]->getMedian(ret_pose); - bundlePoses[i] = ret_pose; - } - } + //Get the 3D marker points + BOOST_FOREACH (const PointDouble& p, m->ros_marker_points_img) + pixels.push_back(cv::Point(p.x, p.y)); + ARCloud::Ptr selected_points = ata::filterCloud(cloud, pixels); + + //Use the kinect data to find a plane and pose for the marker + int ret = PlaneFitPoseImprovement(i, m->ros_corners_3D, selected_points, cloud, m->pose); + + //If the plane fit fails... 
+ if(ret < 0){ + //Mark this tag as invalid + m->valid = false; + //If this was a master tag, reset its visibility + if(master_ind >= 0) + master_visible[master_ind] = false; + //decrement the number of markers seen in this bundle + bundles_seen[bundle_ind] -= 1; + + } + else + m->valid = true; + } + + //For each master tag, infer the 3D position of its corners from other visible tags + //Then, do a plane fit to those new corners + ARCloud inferred_corners; + for(int i=0; i 0){ + //if(master_visible[i] == false){ + if(InferCorners(cloud, *(multi_marker_bundles[i]), inferred_corners) >= 0){ + ARCloud::Ptr inferred_cloud(new ARCloud(inferred_corners)); + PlaneFitPoseImprovement(i+5000, inferred_corners, inferred_cloud, cloud, bundlePoses[i]); + } + + Pose ret_pose; + if(med_filt_size > 0){ + med_filts[i]->addPose(bundlePoses[i]); + med_filts[i]->getMedian(ret_pose); + bundlePoses[i] = ret_pose; + } + } + } } } -} -// Given the pose of a marker, builds the appropriate ROS messages for later publishing -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker, int confidence){ - double px,py,pz,qx,qy,qz,qw; - - px = p.translation[0]/100.0; - py = p.translation[1]/100.0; - pz = p.translation[2]/100.0; - qx = p.quaternion[1]; - qy = p.quaternion[2]; - qz = p.quaternion[3]; - qw = p.quaternion[0]; - - //Get the marker pose in the camera frame - tf::Quaternion rotation (qx,qy,qz,qw); - tf::Vector3 origin (px,py,pz); - tf::Transform t (rotation, origin); //transform from cam to marker - - tf::Vector3 markerOrigin (0, 0, 0); - tf::Transform m (tf::Quaternion::getIdentity (), markerOrigin); - tf::Transform markerPose = t * m; - - //Publish the cam to marker transform for each marker - std::string markerFrame = "ar_marker_"; - std::stringstream out; - out << id; - std::string id_string = out.str(); - markerFrame += id_string; - tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str()); - tf_broadcaster->sendTransform(camToMarker); - - //Create the rviz visualization message - tf::poseTFToMsg (markerPose, rvizMarker->pose); - rvizMarker->header.frame_id = image_msg->header.frame_id; - rvizMarker->header.stamp = image_msg->header.stamp; - rvizMarker->id = id; - - rvizMarker->scale.x = 1.0 * marker_size/100.0; - rvizMarker->scale.y = 1.0 * marker_size/100.0; - rvizMarker->scale.z = 0.2 * marker_size/100.0; - - if(type==MAIN_MARKER) - rvizMarker->ns = "main_shapes"; - else - rvizMarker->ns = "basic_shapes"; - - - rvizMarker->type = visualization_msgs::msg::Marker::CUBE; - rvizMarker->action = visualization_msgs::msg::Marker::ADD; - - //Determine a color and opacity, based on marker type - if(type==MAIN_MARKER){ - rvizMarker->color.r = 1.0f; - rvizMarker->color.g = 0.0f; - rvizMarker->color.b = 0.0f; - rvizMarker->color.a = 1.0; - } - else if(type==VISIBLE_MARKER){ - rvizMarker->color.r = 0.0f; - rvizMarker->color.g = 1.0f; - rvizMarker->color.b = 0.0f; - rvizMarker->color.a = 0.7; - } - else if(type==GHOST_MARKER){ - rvizMarker->color.r = 0.0f; - rvizMarker->color.g = 0.0f; - rvizMarker->color.b = 1.0f; - rvizMarker->color.a = 0.5; - } + // Given the pose of a marker, builds the appropriate ROS messages for later publishing + void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image image_msg, geometry_msgs::msg::TransformStamped &CamToOutput, 
visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::msg::AlvarMarker *ar_pose_marker, int confidence) + { + double px,py,pz,qx,qy,qz,qw; + + px = p.translation[0]/100.0; + py = p.translation[1]/100.0; + pz = p.translation[2]/100.0; + qx = p.quaternion[1]; + qy = p.quaternion[2]; + qz = p.quaternion[3]; + qw = p.quaternion[0]; + + //Get the marker pose in the camera frame + tf2::Quaternion rotation (qx,qy,qz,qw); + tf2::Vector3 origin (px,py,pz); + tf2::Transform t (rotation, origin); //transform from cam to marker + + tf2::Vector3 markerOrigin (0, 0, 0); + tf2::Transform m (tf2::Quaternion::getIdentity (), markerOrigin); + tf2::Transform markerPose = t * m; + + //Publish the cam to marker transform for each marker + std::string markerFrame = "ar_marker_"; + std::stringstream out; + out << id; + std::string id_string = out.str(); + markerFrame += id_string; + geometry_msgs::msg::TransformStamped camToMarker; + camToMarker.header.stamp = image_msg.header.stamp; + camToMarker.header.frame_id = image_msg.header.frame_id; + camToMarker.child_frame_id = markerFrame; + + geometry_msgs::msg::Vector3 trans; + trans.x = px; + trans.y = py; + trans.z = pz; + geometry_msgs::msg::Quaternion rot; + rot.x = qx; + rot.y = qy; + rot.z = qz; + rot.w = qw; + + camToMarker.transform.translation = trans; + camToMarker.transform.rotation = rot; + tf_broadcaster_.sendTransform(camToMarker); + + //Create the rviz visualization message + rvizMarker->pose.position.x = markerPose.getOrigin().getX(); + rvizMarker->pose.position.y = markerPose.getOrigin().getY(); + rvizMarker->pose.position.z = markerPose.getOrigin().getZ(); + rvizMarker->pose.orientation.x = markerPose.getRotation().getX(); + rvizMarker->pose.orientation.y = markerPose.getRotation().getY(); + rvizMarker->pose.orientation.z = markerPose.getRotation().getZ(); + rvizMarker->pose.orientation.w = markerPose.getRotation().getW(); + + rvizMarker->header.frame_id = image_msg.header.frame_id; + rvizMarker->header.stamp = image_msg.header.stamp; + rvizMarker->id = id; + + rvizMarker->scale.x = 1.0 * marker_size/100.0; + rvizMarker->scale.y = 1.0 * marker_size/100.0; + rvizMarker->scale.z = 0.2 * marker_size/100.0; + + if(type==MAIN_MARKER) + rvizMarker->ns = "main_shapes"; + else + rvizMarker->ns = "basic_shapes"; + + + rvizMarker->type = visualization_msgs::msg::Marker::CUBE; + rvizMarker->action = visualization_msgs::msg::Marker::ADD; + + //Determine a color and opacity, based on marker type + if(type==MAIN_MARKER){ + rvizMarker->color.r = 1.0f; + rvizMarker->color.g = 0.0f; + rvizMarker->color.b = 0.0f; + rvizMarker->color.a = 1.0; + } + else if(type==VISIBLE_MARKER){ + rvizMarker->color.r = 0.0f; + rvizMarker->color.g = 1.0f; + rvizMarker->color.b = 0.0f; + rvizMarker->color.a = 0.7; + } + else if(type==GHOST_MARKER){ + rvizMarker->color.r = 0.0f; + rvizMarker->color.g = 0.0f; + rvizMarker->color.b = 1.0f; + rvizMarker->color.a = 0.5; + } - rvizMarker->lifetime = rclcpp::Duration (0.1); + rvizMarker->lifetime = rclcpp::Duration (0.1); - // Only publish the pose of the master tag in each bundle, since that's all we really care about aside from visualization - if(type==MAIN_MARKER){ - //Take the pose of the tag in the camera frame and convert to the output frame (usually torso_lift_link for the PR2) - tf::Transform tagPoseOutput = CamToOutput * markerPose; + // Only publish the pose of the master tag in each bundle, since that's all we really care about aside from visualization + if(type==MAIN_MARKER){ + //Take the pose of the tag in the camera 
frame and convert to the output frame (usually torso_lift_link for the PR2) + //tf2::Transform tagPoseOutput = CamToOutput * markerPose; - //Create the pose marker message - tf::poseTFToMsg (tagPoseOutput, ar_pose_marker->pose.pose); - ar_pose_marker->header.frame_id = output_frame; - ar_pose_marker->header.stamp = image_msg->header.stamp; - ar_pose_marker->id = id; - ar_pose_marker->confidence = confidence; + // Create the pose marker message + // tf2::poseTFToMsg (tagPoseOutput, ar_pose_marker->pose.pose); + + tf2::doTransform(ar_pose_marker->pose, ar_pose_marker->pose,CamToOutput); + ar_pose_marker->header.frame_id = output_frame; + ar_pose_marker->header.stamp = image_msg.header.stamp; + ar_pose_marker->id = id; + ar_pose_marker->confidence = confidence; + } + else + ar_pose_marker = NULL; } - else - ar_pose_marker = NULL; -} -//Callback to handle getting kinect point clouds and processing them -void getPointCloudCallback (const sensor_msgs::msg::PointCloud2::ConstSharedPtr &msg) -{ - sensor_msgs::msg::ImagePtr image_msg(new sensor_msgs::msg::Image); + //Callback to handle getting kinect point clouds and processing them + void getPointCloudCallback (const sensor_msgs::msg::PointCloud2::SharedPtr msg) + { + sensor_msgs::msg::Image image_msg; - //If we've already gotten the cam info, then go ahead - if(cam->getCamInfo_){ - try{ - //Get the transformation from the Camera to the output frame for this image capture - tf2::StampedTransform CamToOutput; + //If we've already gotten the cam info, then go ahead + if(cam->getCamInfo_){ try{ - tf_listener->waitForTransform(output_frame, msg->header.frame_id, msg->header.stamp, rclcpp::Duration(1.0)); - tf_listener->lookupTransform(output_frame, msg->header.frame_id, msg->header.stamp, CamToOutput); - } - catch (tf::TransformException ex){ - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); - } - - //Init and clear visualization markers - visualization_msgs::msg::Marker rvizMarker; - ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; - arPoseMarkers_.markers.clear (); + //Get the transformation from the Camera to the output frame for this image capture + geometry_msgs::msg::TransformStamped CamToOutput; + try + { + tf2_.canTransform(output_frame, msg->header.frame_id, msg->header.stamp, rclcpp::Duration(1.0)); + CamToOutput = tf2_.lookupTransform(output_frame, msg->header.frame_id, msg->header.stamp, rclcpp::Duration(1.0)); + } + catch (tf2::TransformException ex){ + RCLCPP_ERROR(this->get_logger(), "%s",ex.what()); + } - //Convert cloud to PCL - ARCloud cloud; - pcl::fromROSMsg(*msg, cloud); + //Init and clear visualization markers + visualization_msgs::msg::Marker rvizMarker; + ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; + arPoseMarkers_.markers.clear (); - //Get an OpenCV image from the cloud - pcl::toROSMsg (*msg, *image_msg); - image_msg->header.stamp = msg->header.stamp; - image_msg->header.frame_id = msg->header.frame_id; - - //Convert the image - cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); - - //Get the estimated pose of the main markers by using all the markers in each bundle - - // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives - // us a cv::Mat. 
I'm too lazy to change to cv::Mat throughout right now, so I - // do this conversion here -jbinney - IplImage ipl_image = cv_ptr_->image; - GetMultiMarkerPoses(&ipl_image, cloud); - - for (size_t i=0; isize(); i++) - { - int id = (*(marker_detector.markers))[i].GetId(); - - // Draw if id is valid - if(id >= 0){ - - // Don't draw if it is a master tag...we do this later, a bit differently - bool should_draw = true; - for(int j=0; j 0){ - makeMarkerMsgs(MAIN_MARKER, master_id[i], bundlePoses[i], image_msg, CamToOutput, &rvizMarker, &ar_pose_marker, bundles_seen[i]); - rvizMarkerPub_.publish (rvizMarker); - arPoseMarkers_.markers.push_back (ar_pose_marker); - } - } - - //Publish the marker messages - arMarkerPub_.publish (arPoseMarkers_); - } - catch (cv_bridge::Exception& e){ - ROS_ERROR ("ar_track_alvar: Image error: %s", image_msg->encoding.c_str ()); - } - } -} + //Convert cloud to PCL + ARCloud cloud; + pcl::fromROSMsg(*msg, cloud); + //Get an OpenCV image from the cloud + pcl::toROSMsg (*msg, image_msg); + image_msg.header.stamp = msg->header.stamp; + image_msg.header.frame_id = msg->header.frame_id; + + //Convert the image + cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); -//Create a ROS frame out of the known corners of a tag in the weird marker coord frame used by Alvar markers (x right y forward z up) -//p0-->p1 should point in Alvar's pos X direction -//p1-->p2 should point in Alvar's pos Y direction -int makeMasterTransform (const CvPoint3D64f& p0, const CvPoint3D64f& p1, - const CvPoint3D64f& p2, const CvPoint3D64f& p3, - tf::Transform &retT) - { - const tf::Vector3 q0(p0.x, p0.y, p0.z); - const tf::Vector3 q1(p1.x, p1.y, p1.z); - const tf::Vector3 q2(p2.x, p2.y, p2.z); - const tf::Vector3 q3(p3.x, p3.y, p3.z); - - // (inverse) matrix with the given properties - const tf::Vector3 v = (q1-q0).normalized(); - const tf::Vector3 w = (q2-q1).normalized(); - const tf::Vector3 n = v.cross(w); - tf::Matrix3x3 m(v[0], v[1], v[2], w[0], w[1], w[2], n[0], n[1], n[2]); - m = m.inverse(); - - //Translate to quaternion - if(m.determinant() <= 0) - return -1; - - //Use Eigen for this part instead, because the ROS version of bullet appears to have a bug - Eigen::Matrix3f eig_m; - for(int i=0; i<3; i++){ - for(int j=0; j<3; j++){ - eig_m(i,j) = m[i][j]; - } - } - Eigen::Quaternion eig_quat(eig_m); - - // Translate back to bullet - tfScalar ex = eig_quat.x(); - tfScalar ey = eig_quat.y(); - tfScalar ez = eig_quat.z(); - tfScalar ew = eig_quat.w(); - tf::Quaternion quat(ex,ey,ez,ew); - quat = quat.normalized(); - - double qx = (q0.x() + q1.x() + q2.x() + q3.x()) / 4.0; - double qy = (q0.y() + q1.y() + q2.y() + q3.y()) / 4.0; - double qz = (q0.z() + q1.z() + q2.z() + q3.z()) / 4.0; - tf::Vector3 origin (qx,qy,qz); - - tf::Transform tform (quat, origin); //transform from master to marker - retT = tform; - - return 0; - } + //Get the estimated pose of the main markers by using all the markers in each bundle + // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives + // us a cv::Mat. 
I'm too lazy to change to cv::Mat throughout right now, so I + // do this conversion here -jbinney + cv::Mat ipl_image = cv_ptr_->image; + GetMultiMarkerPoses(&ipl_image, cloud); -//Find the coordinates of the Master marker with respect to the coord frame of each of it's child markers -//This data is used for later estimation of the Master marker pose from the child poses -int calcAndSaveMasterCoords(MultiMarkerBundle &master) -{ - int mast_id = master.master_id; - std::vector rel_corner_coords; - - //Go through all the markers associated with this bundle - for (size_t i=0; isize(); i++) + { + int id = (*(marker_detector.markers))[i].GetId(); + + // Draw if id is valid + if(id >= 0) + { + + // Don't draw if it is a master tag...we do this later, a bit differently + bool should_draw = true; + for(int j=0; jpublish (rvizMarker); + } + } } - //Use them to find a transform from the master frame to the child frame - tf::Transform tform; - makeMasterTransform(mark_corners[0], mark_corners[1], mark_corners[2], mark_corners[3], tform); - - //Finally, find the coords of the corners of the master in the child frame - for(int j=0; j<4; j++){ - - CvPoint3D64f corner_coord = master.pointcloud[master.pointcloud_index(mast_id, j)]; - double px = corner_coord.x; - double py = corner_coord.y; - double pz = corner_coord.z; - - tf::Vector3 corner_vec (px, py, pz); - tf::Vector3 ans = (tform.inverse()) * corner_vec; - rel_corner_coords.push_back(ans); + //Draw the main markers, whether they are visible or not -- but only if at least 1 marker from their bundle is currently seen + for(int i=0; i 0) + { + makeMarkerMsgs(MAIN_MARKER, master_id[i], bundlePoses[i], image_msg, CamToOutput, &rvizMarker, &ar_pose_marker, bundles_seen[i]); + rvizMarkerPub_->publish (rvizMarker); + arPoseMarkers_.markers.push_back (ar_pose_marker); + } } - - master.rel_corners.push_back(rel_corner_coords); - } - - return 0; -} - -int main(int argc, char *argv[]) -{ - - rclcpp::init(argc, argv); - auto node = rclcpp::Node::make_shared("marker_detect"); + //Publish the marker messages + arMarkerPub_->publish (arPoseMarkers_); + } - if(argc < 9){ - std::cout << std::endl; - cout << "Not enough arguments provided." 
<< endl;
-    cout << "Usage: ./findMarkerBundles <marker size in cm> <max new marker error> <max track error> <cam image topic> <cam info topic> <output frame> <med filt size> <list of bundle XML files...>" << endl;
-    std::cout << std::endl;
-    return 0;
-  }
-
-  // Get params from command line
-  marker_size = atof(argv[1]);
-  max_new_marker_error = atof(argv[2]);
-  max_track_error = atof(argv[3]);
-  cam_image_topic = argv[4];
-  cam_info_topic = argv[5];
-  output_frame = argv[6];
-  med_filt_size = atoi(argv[7]);
-  int n_args_before_list = 8;
-  n_bundles = argc - n_args_before_list;
-
-  marker_detector.SetMarkerSize(marker_size);
-  multi_marker_bundles = new MultiMarkerBundle*[n_bundles];
-  bundlePoses = new Pose[n_bundles];
-  master_id = new int[n_bundles];
-  bundle_indices = new std::vector<int>[n_bundles];
-  bundles_seen = new int[n_bundles];
-  master_visible = new bool[n_bundles];
-
-  //Create median filters
-  med_filts = new ata::MedianFilter*[n_bundles];
-  for(int i=0; i<n_bundles; i++)
-    med_filts[i] = new ata::MedianFilter(med_filt_size);
-
-  // Load the marker bundle XML files
-  for(int i=0; i<n_bundles; i++)
-  {
-    bundlePoses[i].Reset();
-    MultiMarker loadHelper;
-    if(loadHelper.Load(argv[i + n_args_before_list], FILE_FORMAT_XML))
-    {
-      vector<int> id_vector = loadHelper.getIndices();
-      multi_marker_bundles[i] = new MultiMarkerBundle(id_vector);
-      multi_marker_bundles[i]->Load(argv[i + n_args_before_list], FILE_FORMAT_XML);
-      master_id[i] = multi_marker_bundles[i]->getMasterId();
-      bundle_indices[i] = multi_marker_bundles[i]->getIndices();
-      calcAndSaveMasterCoords(*(multi_marker_bundles[i]));
-    }
-    else{
-      cout<<"Cannot load file "<< argv[i + n_args_before_list] << endl;
-    }
-  }
-
-  // Set up camera, listeners, and broadcasters
-  cam = new Camera(n, cam_info_topic);
-  tf_listener = new tf2_ros::TransformListener(n);
-  tf_broadcaster = new tf::TransformBroadcaster();
+  //Create a ROS frame out of the known corners of a tag in the weird marker coord frame used by Alvar markers (x right y forward z up)
+  //p0-->p1 should point in Alvar's pos X direction
+  //p1-->p2 should point in Alvar's pos Y direction
+  int makeMasterTransform (const cv::Point3f& p0, const cv::Point3f& p1,
+                           const cv::Point3f& p2, const cv::Point3f& p3,
+                           tf2::Transform &retT)
+  {
+    const tf2::Vector3 q0(p0.x, p0.y, p0.z);
+    const tf2::Vector3 q1(p1.x, p1.y, p1.z);
+    const tf2::Vector3 q2(p2.x, p2.y, p2.z);
+    const tf2::Vector3 q3(p3.x, p3.y, p3.z);
+
+    // (inverse) matrix with the given properties
+    const tf2::Vector3 v = (q1-q0).normalized();
+    const tf2::Vector3 w = (q2-q1).normalized();
+    const tf2::Vector3 n = v.cross(w);
+    tf2::Matrix3x3 m(v[0], v[1], v[2], w[0], w[1], w[2], n[0], n[1], n[2]);
+    m = m.inverse();
+
+    //Translate to quaternion
+    if(m.determinant() <= 0)
+      return -1;
+
+    //Use Eigen for this part instead, because the ROS version of bullet appears to have a bug
+    Eigen::Matrix3f eig_m;
+    for(int i=0; i<3; i++){
+      for(int j=0; j<3; j++){
+        eig_m(i,j) = m[i][j];
+      }
+    }
+    Eigen::Quaternion<float> eig_quat(eig_m);
+
+    // Translate back to bullet
+    tf2Scalar ex = eig_quat.x();
+    tf2Scalar ey = eig_quat.y();
+    tf2Scalar ez = eig_quat.z();
+    tf2Scalar ew = eig_quat.w();
+    tf2::Quaternion quat(ex,ey,ez,ew);
+    quat = quat.normalized();
+
+    double qx = (q0.x() + q1.x() + q2.x() + q3.x()) / 4.0;
+    double qy = (q0.y() + q1.y() + q2.y() + q3.y()) / 4.0;
+    double qz = (q0.z() + q1.z() + q2.z() + q3.z()) / 4.0;
+    tf2::Vector3 origin (qx,qy,qz);
+
+    tf2::Transform tform (quat, origin); //transform from master to marker
+    retT = tform;
+    return 0;
+  }
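The Eigen round-trip in makeMasterTransform above is the usual workaround when a
rotation matrix must become a quaternion and the tf2/bullet conversion is not
trusted. A self-contained sketch of just that conversion (the helper name is
illustrative; only the tf2 and Eigen calls already used above are assumed):

    #include <Eigen/Geometry>
    #include <tf2/LinearMath/Matrix3x3.h>
    #include <tf2/LinearMath/Quaternion.h>

    // Convert a tf2 rotation matrix to a normalized tf2 quaternion via Eigen,
    // mirroring the workaround in makeMasterTransform().
    tf2::Quaternion quaternionFromBasis(const tf2::Matrix3x3& m)
    {
      // Copy the 3x3 rotation into Eigen, whose quaternion constructor
      // handles rotation matrices robustly.
      Eigen::Matrix3f eig_m;
      for (int i = 0; i < 3; i++)
        for (int j = 0; j < 3; j++)
          eig_m(i, j) = m[i][j];
      const Eigen::Quaternion<float> eig_quat(eig_m);

      // Copy back into tf2 and normalize before use.
      tf2::Quaternion quat(eig_quat.x(), eig_quat.y(), eig_quat.z(), eig_quat.w());
      return quat.normalized();
    }

+
+  //Find the coordinates of the Master marker with respect to the coord frame of each of its child markers
+  //This data is used for later estimation of the Master marker pose from the child poses
+  int calcAndSaveMasterCoords(MultiMarkerBundle &master)
+  {
+    int mast_id = 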
master.master_id; + std::vector rel_corner_coords; + + //Go through all the markers associated with this bundle + for (size_t i=0; icreate_publisher ("ar_pose_marker", 0); - rvizMarkerPub_ = node->create_publisher ("visualization_marker", 0); - rvizMarkerPub2_ = node->create_publisher ("ARmarker_points", 0); - - //Give tf a chance to catch up before the camera callback starts asking for transforms - rclcpp::Duration(1.0).sleep(); - rclcpp::spin_some(node); - - //Subscribe to topics and set up callbacks - ROS_INFO ("Subscribing to image topic"); +}; - cloud_sub_ = this->create_subscription(cam_image_topic, 1, getPointCloudCallback); - rclcpp::spin(node); +int main(int argc, char *argv[]) +{ + rclcpp::init(argc, argv); + rclcpp::spin(std::make_shared(argc,argv)); rclcpp::shutdown(); - return 0; } diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index d189bd3..0d1cc93 100644 --- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -172,6 +172,8 @@ class IndividualMarkersNoKinect : public rclcpp::Node rclcpp::Rate loop_rate(100); loop_rate.sleep(); + arMarkerPub_ = this->create_publisher ("ar_pose_marker", 0); + rvizMarkerPub_ = this->create_publisher ("visualization_marker", 0); cam_sub_ = this->create_subscription(cam_image_topic, 1, std::bind(&IndividualMarkersNoKinect::getCapCallback, this, std::placeholders::_1)); diff --git a/ar_track_alvar/nodes/TrainMarkerBundle.cpp b/ar_track_alvar/nodes/TrainMarkerBundle.cpp index 302fbc5..06be1ed 100644 --- a/ar_track_alvar/nodes/TrainMarkerBundle.cpp +++ b/ar_track_alvar/nodes/TrainMarkerBundle.cpp @@ -46,8 +46,12 @@ #include "tf2_ros/buffer.h" #include "tf2_ros/transform_listener.h" #include +#include +#include +#include "tf2_ros/create_timer_ros.h" #include #include +#include using namespace alvar; using namespace std; @@ -56,360 +60,406 @@ using namespace std; #define VISIBLE_MARKER 2 #define GHOST_MARKER 3 -Camera *cam; -cv_bridge::CvImagePtr cv_ptr_; -image_transport::Subscriber cam_sub_; -ros::Publisher arMarkerPub_; -ros::Publisher rvizMarkerPub_; -ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -tf2_ros::TransformListener *tf_listener; -tf::TransformBroadcaster *tf_broadcaster; -MarkerDetector marker_detector; -MultiMarkerInitializer *multi_marker_init=NULL; -MultiMarkerBundle *multi_marker_bundle=NULL; -int auto_count; -bool auto_collect; - -bool init=true; -bool add_measurement=false; -bool optimize = false; -bool optimize_done = false; - -double marker_size; -double max_new_marker_error; -double max_track_error; -std::string cam_image_topic; -std::string cam_info_topic; -std::string output_frame; -int nof_markers; - -double GetMultiMarkerPose(IplImage *image, Pose &pose); -void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg); -int keyCallback(int key); -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker); - - -double GetMultiMarkerPose(IplImage *image, Pose &pose) { - static bool init=true; - - if (init) { - init=false; - vector id_vector; - for(int i = 0; i < nof_markers; ++i) - id_vector.push_back(i); - // We make the initialization for MultiMarkerBundle using MultiMarkerInitializer - // Each marker needs to be visible in at least two images and at most 64 image are used. 
- multi_marker_init = new MultiMarkerInitializer(id_vector, 2, 64); - pose.Reset(); - multi_marker_init->PointCloudAdd(id_vector[0], marker_size, pose); - multi_marker_bundle = new MultiMarkerBundle(id_vector); - marker_detector.SetMarkerSize(marker_size); - } - - double error = -1; - if (!optimize_done) { - if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)) { - error = multi_marker_init->Update(marker_detector.markers, cam, pose); - } - } - else { - if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)) { - error = multi_marker_bundle->Update(marker_detector.markers, cam, pose); - if ((multi_marker_bundle->SetTrackMarkers(marker_detector, cam, pose, image) > 0) && (marker_detector.DetectAdditional(image, cam, false) > 0)) - { - error = multi_marker_bundle->Update(marker_detector.markers, cam, pose); - } - } - } - - if (add_measurement) { - cout << "Markers seen: " << marker_detector.markers->size() << "\n"; - if (marker_detector.markers->size() >= 2) { - cout<<"Adding measurement..."<MeasurementsAdd(marker_detector.markers); - } - else{ - cout << "Not enough markers to capture measurement\n"; - } - add_measurement=false; - } +class TrainMarkerBundle : public rclcpp::Node +{ + private: + Camera *cam; + cv_bridge::CvImagePtr cv_ptr_; + image_transport::Subscriber cam_sub_; + rclcpp::Publisher::SharedPtr arMarkerPub_; + rclcpp::Publisher::SharedPtr rvizMarkerPub_; + tf2_ros::TransformBroadcaster tf_broadcaster_; + tf2_ros::TransformListener tf_listener_; + tf2_ros::Buffer tf2_; + MarkerDetector marker_detector; + MultiMarkerInitializer *multi_marker_init=NULL; + MultiMarkerBundle *multi_marker_bundle=NULL; + + ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; + visualization_msgs::msg::Marker rvizMarker_; + + + int auto_count; + bool auto_collect; + bool init=true; + bool add_measurement=false; + bool optimize = false; + bool optimize_done = false; + + double marker_size; + double max_new_marker_error; + double max_track_error; + std::string cam_image_topic; + std::string cam_info_topic; + std::string output_frame; + int nof_markers; + + + public: + TrainMarkerBundle(int argc, char* argv[]):Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this) + { + + + if(argc < 8){ + std::cout << std::endl; + cout << "Not enough arguments provided." << endl; + cout << "Usage: ./trainMarkerBundle " << endl; + std::cout << std::endl; + exit(0); + } - if (optimize) { - cout<<"Initializing..."<Initialize(cam)) { - cout<<"Initialization failed, add some more measurements."<Reset(); - multi_marker_bundle->MeasurementsReset(); - // Copy all measurements into the bundle adjuster. - for (int i = 0; i < multi_marker_init->getMeasurementCount(); ++i) { - Pose p2; - multi_marker_init->getMeasurementPose(i, cam, p2); - const std::vector > markers = multi_marker_init->getMeasurementMarkers(i); - multi_marker_bundle->MeasurementsAdd(&markers, p2); - } - // Initialize the bundle adjuster with initial marker poses. 
- multi_marker_bundle->PointCloudCopy(multi_marker_init); - cout<<"Optimizing..."<Optimize(cam, 0.01, 20)) { - cout<<"Optimizing done"<header.stamp, image_msg->header.frame_id, markerFrame.c_str()); - tf_broadcaster->sendTransform(camToMarker); - } - //Create the rviz visualization message - tf::poseTFToMsg (markerPose, rvizMarker->pose); - rvizMarker->header.frame_id = image_msg->header.frame_id; - rvizMarker->header.stamp = image_msg->header.stamp; - rvizMarker->id = id; - - rvizMarker->scale.x = 1.0 * marker_size/100.0; - rvizMarker->scale.y = 1.0 * marker_size/100.0; - rvizMarker->scale.z = 0.2 * marker_size/100.0; - rvizMarker->ns = "basic_shapes"; - rvizMarker->type = visualization_msgs::msg::Marker::CUBE; - rvizMarker->action = visualization_msgs::msg::Marker::ADD; - - //Determine a color and opacity, based on marker type - if(type==MAIN_MARKER){ - rvizMarker->color.r = 1.0f; - rvizMarker->color.g = 0.0f; - rvizMarker->color.b = 0.0f; - rvizMarker->color.a = 1.0; - } - else if(type==VISIBLE_MARKER){ - rvizMarker->color.r = 0.0f; - rvizMarker->color.g = 1.0f; - rvizMarker->color.b = 0.0f; - rvizMarker->color.a = 1.0; - } - else if(type==GHOST_MARKER){ - rvizMarker->color.r = 0.0f; - rvizMarker->color.g = 0.0f; - rvizMarker->color.b = 1.0f; - rvizMarker->color.a = 0.5; - } + //Give tf a chance to catch up before the camera callback starts asking for transforms + // It will also reconfigure parameters for the first time, setting the default values + //TODO: come back to this, there's probably a better way to do this + rclcpp::Rate loop_rate(100); + loop_rate.sleep(); - rvizMarker->lifetime = rclcpp::Duration (1.0); - //Get the pose of the tag in the camera frame, then convert to the output frame (usually torso) - tf::Transform tagPoseOutput = CamToOutput * markerPose; + arMarkerPub_ = this->create_publisher ("ar_pose_marker", 0); - //Create the pose marker message - tf::poseTFToMsg (tagPoseOutput, ar_pose_marker->pose.pose); - ar_pose_marker->header.frame_id = output_frame; - ar_pose_marker->header.stamp = image_msg->header.stamp; - ar_pose_marker->id = id; -} + rvizMarkerPub_ = this->create_publisher ("visualization_marker", 0); -void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg) -{ - //Check if automatic measurement collection should be triggered - if(auto_collect){ - auto_count++; - add_measurement = true; - if(auto_count >= 5) - auto_collect = false; - } + //Subscribe to camera message + RCLCPP_INFO(this->get_logger(),"Subscribing to image topic"); + cam_sub_ = image_transport::create_subscription(this, cam_image_topic, [&](auto& msg) { this->getCapCallback(msg); },"raw"); + } + + void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr& image_msg) + { + //Check if automatic measurement collection should be triggered + if(auto_collect){ + auto_count++; + add_measurement = true; + if(auto_count >= 5) + auto_collect = false; + } + + //If we've already gotten the cam info, then go ahead + if(cam->getCamInfo_){ + try{ + //Get the transformation from the Camera to the output frame for this image capture + geometry_msgs::msg::TransformStamped CamToOutput; + try{ + tf2_.canTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp, rclcpp::Duration(1.0)); + CamToOutput= tf2_.lookupTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp,rclcpp::Duration(1.0)); + } + catch (tf2::TransformException ex){ + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); + } + + visualization_msgs::msg::Marker 
rvizMarker; + ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; + arPoseMarkers_.markers.clear (); + + //Convert the image + cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); + + //Get the estimated pose of the main markers by using all the markers in each bundle + + // GetMultiMarkersPoses expects an cv::Mat*, but as of ros groovy, cv_bridge gives + // us a cv::Mat. I'm too lazy to change to cv::Mat throughout right now, so I + // do this conversion here -jbinney + cv::Mat ipl_image = cv_ptr_->image; + + //Get the estimated pose of the main marker using the whole bundle + static Pose bundlePose; + double error = GetMultiMarkerPose(&ipl_image, bundlePose); + + if (optimize_done){ + //Draw the main marker + makeMarkerMsgs(MAIN_MARKER, 0, bundlePose, image_msg, CamToOutput, &rvizMarker, &ar_pose_marker); + rvizMarkerPub_->publish (rvizMarker); + arPoseMarkers_.markers.push_back (ar_pose_marker); + } + //Now grab the poses of the other markers that are visible + for (size_t i=0; isize(); i++) + { + int id = (*(marker_detector.markers))[i].GetId(); + + //Don't do the main marker (id=0) if we've already drawn it + if(id > 0 || ((!optimize_done) && id==0)){ + Pose p = (*(marker_detector.markers))[i].pose; + makeMarkerMsgs(VISIBLE_MARKER, id, p, image_msg, CamToOutput, &rvizMarker, &ar_pose_marker); + rvizMarkerPub_->publish (rvizMarker); + arPoseMarkers_.markers.push_back (ar_pose_marker); + } + } + arMarkerPub_->publish (arPoseMarkers_); + } + catch (cv_bridge::Exception& e){ + RCLCPP_ERROR (this->get_logger(),"Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ()); + } + } + + //Sleep if we are auto collecting + if(auto_collect) + usleep(1000000); + } + + double GetMultiMarkerPose(cv::Mat *image, Pose &pose) + { + static bool init=true; + + if (init) { + init=false; + vector id_vector; + for(int i = 0; i < nof_markers; ++i) + id_vector.push_back(i); + // We make the initialization for MultiMarkerBundle using MultiMarkerInitializer + // Each marker needs to be visible in at least two images and at most 64 image are used. 
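+        // Note: the MultiMarkerInitializer below only collects and stores
+        // measurements; the MultiMarkerBundle created alongside it performs
+        // the actual pose optimization once the 'optimize' flag is set from
+        // the keyboard menu.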
+ multi_marker_init = new MultiMarkerInitializer(id_vector, 2, 64); + pose.Reset(); + multi_marker_init->PointCloudAdd(id_vector[0], marker_size, pose); + multi_marker_bundle = new MultiMarkerBundle(id_vector); + marker_detector.SetMarkerSize(marker_size); + } + + double error = -1; + if (!optimize_done) { + if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)) { + error = multi_marker_init->Update(marker_detector.markers, cam, pose); + } + } + else { + if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)) { + error = multi_marker_bundle->Update(marker_detector.markers, cam, pose); + if ((multi_marker_bundle->SetTrackMarkers(marker_detector, cam, pose, image) > 0) && (marker_detector.DetectAdditional(image, cam, false) > 0)) + { + error = multi_marker_bundle->Update(marker_detector.markers, cam, pose); + } + } + } + + if (add_measurement) { + cout << "Markers seen: " << marker_detector.markers->size() << "\n"; + if (marker_detector.markers->size() >= 2) { + cout<<"Adding measurement..."<MeasurementsAdd(marker_detector.markers); + } + else{ + cout << "Not enough markers to capture measurement\n"; + } + add_measurement=false; + } + + if (optimize) { + cout<<"Initializing..."<Initialize(cam)) { + cout<<"Initialization failed, add some more measurements."<Reset(); + multi_marker_bundle->MeasurementsReset(); + // Copy all measurements into the bundle adjuster. + for (int i = 0; i < multi_marker_init->getMeasurementCount(); ++i) { + Pose p2; + multi_marker_init->getMeasurementPose(i, cam, p2); + const std::vector > markers = multi_marker_init->getMeasurementMarkers(i); + multi_marker_bundle->MeasurementsAdd(&markers, p2); + } + // Initialize the bundle adjuster with initial marker poses. + multi_marker_bundle->PointCloudCopy(multi_marker_init); + cout<<"Optimizing..."<Optimize(cam, 0.01, 20)) { + cout<<"Optimizing done"<header.stamp; + camToMarker.header.frame_id = image_msg->header.frame_id; + camToMarker.child_frame_id = markerFrame; + + geometry_msgs::msg::Vector3 trans; + trans.x = px; + trans.y = py; + trans.z = pz; + geometry_msgs::msg::Quaternion rot; + rot.x = qx; + rot.y = qy; + rot.z = qz; + rot.w = qw; + + camToMarker.transform.translation = trans; + camToMarker.transform.rotation = rot; + tf_broadcaster_.sendTransform(camToMarker); + } - //If we've already gotten the cam info, then go ahead - if(cam->getCamInfo_){ - try{ - //Get the transformation from the Camera to the output frame for this image capture - tf2::StampedTransform CamToOutput; - try{ - tf_listener->waitForTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp, rclcpp::Duration(1.0)); - tf_listener->lookupTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp, CamToOutput); - } - catch (tf::TransformException ex){ - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); - } - - visualization_msgs::msg::Marker rvizMarker; - ar_track_alvar_msgs::AlvarMarker ar_pose_marker; - arPoseMarkers_.markers.clear (); - - //Convert the image - cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); - - //Get the estimated pose of the main markers by using all the markers in each bundle - - // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives - // us a cv::Mat. 
I'm too lazy to change to cv::Mat throughout right now, so I - // do this conversion here -jbinney - IplImage ipl_image = cv_ptr_->image; - - //Get the estimated pose of the main marker using the whole bundle - static Pose bundlePose; - double error = GetMultiMarkerPose(&ipl_image, bundlePose); - - if (optimize_done){ - //Draw the main marker - makeMarkerMsgs(MAIN_MARKER, 0, bundlePose, image_msg, CamToOutput, &rvizMarker, &ar_pose_marker); - rvizMarkerPub_.publish (rvizMarker); - arPoseMarkers_.markers.push_back (ar_pose_marker); + //Create the rviz visualization message + rvizMarker_.pose.position.x = markerPose.getOrigin().getX(); + rvizMarker_.pose.position.y = markerPose.getOrigin().getY(); + rvizMarker_.pose.position.z = markerPose.getOrigin().getZ(); + rvizMarker_.pose.orientation.x = markerPose.getRotation().getX(); + rvizMarker_.pose.orientation.y = markerPose.getRotation().getY(); + rvizMarker_.pose.orientation.z = markerPose.getRotation().getZ(); + rvizMarker_.pose.orientation.w = markerPose.getRotation().getW(); + rvizMarker->header.frame_id = image_msg->header.frame_id; + rvizMarker->header.stamp = image_msg->header.stamp; + rvizMarker->id = id; + + rvizMarker->scale.x = 1.0 * marker_size/100.0; + rvizMarker->scale.y = 1.0 * marker_size/100.0; + rvizMarker->scale.z = 0.2 * marker_size/100.0; + rvizMarker->ns = "basic_shapes"; + rvizMarker->type = visualization_msgs::msg::Marker::CUBE; + rvizMarker->action = visualization_msgs::msg::Marker::ADD; + + //Determine a color and opacity, based on marker type + if(type==MAIN_MARKER){ + rvizMarker->color.r = 1.0f; + rvizMarker->color.g = 0.0f; + rvizMarker->color.b = 0.0f; + rvizMarker->color.a = 1.0; } - //Now grab the poses of the other markers that are visible - for (size_t i=0; isize(); i++) + else if(type==VISIBLE_MARKER){ + rvizMarker->color.r = 0.0f; + rvizMarker->color.g = 1.0f; + rvizMarker->color.b = 0.0f; + rvizMarker->color.a = 1.0; + } + else if(type==GHOST_MARKER){ + rvizMarker->color.r = 0.0f; + rvizMarker->color.g = 0.0f; + rvizMarker->color.b = 1.0f; + rvizMarker->color.a = 0.5; + } + + rvizMarker->lifetime = rclcpp::Duration (1.0); + + + //Create the pose marker messages + //ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; + //Get the pose of the tag in the camera frame, then the output frame (usually torso) + tf2::doTransform(ar_pose_marker->pose, ar_pose_marker->pose,CamToOutput); + + + //Get the pose of the tag in the camera frame, then convert to the output frame (usually torso) + //tf2::Transform tagPoseOutput = CamToOutput * markerPose; + + //Create the pose marker message + // tf::poseTFToMsg (tagPoseOutput, ar_pose_marker->pose.pose); + ar_pose_marker->header.frame_id = output_frame; + ar_pose_marker->header.stamp = image_msg->header.stamp; + ar_pose_marker->id = id; + } + + //Do something based on keystrokes from menu + int keyProcess(int key) + { + if(key == 'r') + { + cout<<"Reseting multi marker"<Reset(); + multi_marker_init->MeasurementsReset(); + multi_marker_bundle->Reset(); + multi_marker_bundle->MeasurementsReset(); + add_measurement = false; + optimize = false; + optimize_done = false; + } + else if(key == 'l') + { + if(multi_marker_bundle->Load("mmarker.xml", FILE_FORMAT_XML)) + { + cout<<"Multi marker loaded"<PointCloudCopy(multi_marker_bundle); + optimize_done = true; + } + else + cout<<"Cannot load multi marker"<Save("mmarker.xml", FILE_FORMAT_XML)) + cout<<"Multi marker saved"< 0 || ((!optimize_done) && id==0)){ - Pose p = (*(marker_detector.markers))[i].pose; - 
makeMarkerMsgs(VISIBLE_MARKER, id, p, image_msg, CamToOutput, &rvizMarker, &ar_pose_marker); - rvizMarkerPub_.publish (rvizMarker); - arPoseMarkers_.markers.push_back (ar_pose_marker); - } + exit(0); } - arMarkerPub_.publish (arPoseMarkers_); + else return key; + + return 0; } - catch (cv_bridge::Exception& e){ - ROS_ERROR ("Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ()); - } - } - //Sleep if we are auto collecting - if(auto_collect) - usleep(1000000); -} +}; -//Do something based on keystrokes from menu -int keyProcess(int key) -{ - if(key == 'r') - { - cout<<"Reseting multi marker"<Reset(); - multi_marker_init->MeasurementsReset(); - multi_marker_bundle->Reset(); - multi_marker_bundle->MeasurementsReset(); - add_measurement = false; - optimize = false; - optimize_done = false; - } - else if(key == 'l') - { - if(multi_marker_bundle->Load("mmarker.xml", FILE_FORMAT_XML)) - { - cout<<"Multi marker loaded"<PointCloudCopy(multi_marker_bundle); - optimize_done = true; - } - else - cout<<"Cannot load multi marker"<Save("mmarker.xml", FILE_FORMAT_XML)) - cout<<"Multi marker saved"< " << endl; - std::cout << std::endl; - return 0; - } + rclcpp::init(argc, argv); + + auto node = std::make_shared(argc,argv); - // Get params from command line - nof_markers = atoi(argv[1]); - marker_size = atof(argv[2]); - max_new_marker_error = atof(argv[3]); - max_track_error = atof(argv[4]); - cam_image_topic = argv[5]; - cam_info_topic = argv[6]; - output_frame = argv[7]; - marker_detector.SetMarkerSize(marker_size); - - cam = new Camera(n, cam_info_topic); - tf_listener = new tf2_ros::TransformListener(n); - tf_broadcaster = new tf::TransformBroadcaster(); - arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::msg::Marker > ("visualization_marker", 0); - - //Give tf a chance to catch up before the camera callback starts asking for transforms - rclcpp::Duration(1.0).sleep(); - rclcpp::spin_some(node); - - //Subscribe to camera message - ROS_INFO ("Subscribing to image topic"); - image_transport::ImageTransport it_(n); - cam_sub_ = it_.subscribe (cam_image_topic, 1, &getCapCallback); - - // Output usage message + // Output usage message std::cout << std::endl; std::cout << "Keyboard Shortcuts:" << std::endl; std::cout << " l: load marker configuration from mmarker.txt" << std::endl; @@ -423,14 +473,16 @@ int main(int argc, char *argv[]) std::cout << "Please type commands with the openCV window selected" << std::endl; std::cout << std::endl; - cvNamedWindow("Command input window", CV_WINDOW_AUTOSIZE); + cv::namedWindow("Command input window", cv::WINDOW_AUTOSIZE); while(1){ - int key = cvWaitKey(20); + int key = cv::waitKey(20); if(key >= 0) - keyProcess(key); + node->keyProcess(key); rclcpp::spin_some(node); } - return 0; + rclcpp::shutdown(); + return 0; + } From 9f984aebb12013077e4f89c9a7eca04b634bbc5e Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Tue, 27 Jul 2021 06:17:22 -0500 Subject: [PATCH 09/27] Package builds successfully --- ar_track_alvar/CMakeLists.txt | 12 +- .../nodes/FindMarkerBundlesNoKinect.cpp | 585 ++++++++++-------- ar_track_alvar/src/SampleMarkerCreator.cpp | 49 +- 3 files changed, 360 insertions(+), 286 deletions(-) diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index ca84834..360c6ce 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -141,13 +141,13 @@ add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) 
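# Note: target_link_libraries() links the plain CMake targets built in this
# tree, while ament_target_dependencies() adds the include paths and libraries
# of the ament packages listed in ${dependencies}.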
target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) ament_target_dependencies(findMarkerBundles ${dependencies}) -# add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) -# target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) -# ament_target_dependencies(findMarkerBundlesNoKinect ${dependencies}) +add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) +target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) +ament_target_dependencies(findMarkerBundlesNoKinect ${dependencies}) -# add_executable(createMarker src/SampleMarkerCreator.cpp) -# target_link_libraries(createMarker ar_track_alvar) -# ament_target_dependencies(createMarker ${dependencies}) +add_executable(createMarker src/SampleMarkerCreator.cpp) +target_link_libraries(createMarker ar_track_alvar) +ament_target_dependencies(createMarker ${dependencies}) ament_export_include_directories(include) diff --git a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp index 31d0689..9362ea8 100644 --- a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp @@ -47,6 +47,9 @@ #include "tf2_ros/transform_listener.h" #include #include +#include +#include +#include "tf2_ros/create_timer_ros.h" using namespace alvar; using namespace std; @@ -55,292 +58,344 @@ using namespace std; #define VISIBLE_MARKER 2 #define GHOST_MARKER 3 -Camera *cam; -cv_bridge::CvImagePtr cv_ptr_; -image_transport::Subscriber cam_sub_; -ros::Publisher arMarkerPub_; -ros::Publisher rvizMarkerPub_; -ar_track_alvar_msgs::AlvarMarkers arPoseMarkers_; -tf2_ros::TransformListener *tf_listener; -tf::TransformBroadcaster *tf_broadcaster; -MarkerDetector marker_detector; -MultiMarkerBundle **multi_marker_bundles=NULL; -Pose *bundlePoses; -int *master_id; -bool *bundles_seen; -std::vector *bundle_indices; -bool init = true; - -double marker_size; -double max_new_marker_error; -double max_track_error; -std::string cam_image_topic; -std::string cam_info_topic; -std::string output_frame; -int n_bundles = 0; - -void GetMultiMarkerPoses(IplImage *image); -void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg); -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker); - - -// Updates the bundlePoses of the multi_marker_bundles by detecting markers and using all markers in a bundle to infer the master tag's position -void GetMultiMarkerPoses(IplImage *image) { - - if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)){ - for(int i=0; iUpdate(marker_detector.markers, cam, bundlePoses[i]); - - if(marker_detector.DetectAdditional(image, cam, false) > 0){ - for(int i=0; iSetTrackMarkers(marker_detector, cam, bundlePoses[i], image) > 0)) - multi_marker_bundles[i]->Update(marker_detector.markers, cam, bundlePoses[i]); - } - } - } -} - -// Given the pose of a marker, builds the appropriate ROS messages for later publishing -void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, tf2::StampedTransform &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::AlvarMarker *ar_pose_marker){ - double px,py,pz,qx,qy,qz,qw; - - px = p.translation[0]/100.0; - py = p.translation[1]/100.0; - pz = 
p.translation[2]/100.0;
-    qx = p.quaternion[1];
-    qy = p.quaternion[2];
-    qz = p.quaternion[3];
-    qw = p.quaternion[0];
-
-    //Get the marker pose in the camera frame
-    tf::Quaternion rotation (qx,qy,qz,qw);
-    tf::Vector3 origin (px,py,pz);
-    tf::Transform t (rotation, origin);  //transform from cam to marker
-
-    tf::Vector3 markerOrigin (0, 0, 0);
-    tf::Transform m (tf::Quaternion::getIdentity (), markerOrigin);
-    tf::Transform markerPose = t * m;
-
-    //Publish the cam to marker transform for main marker in each bundle
-    if(type==MAIN_MARKER){
-        std::string markerFrame = "ar_marker_";
-        std::stringstream out;
-        out << id;
-        std::string id_string = out.str();
-        markerFrame += id_string;
-        tf2::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame.c_str());
-        tf_broadcaster->sendTransform(camToMarker);
-    }
+class FindMarkerBundlesNoKinect : public rclcpp::Node
+{
+  private:
+
+    Camera *cam;
+    cv_bridge::CvImagePtr cv_ptr_;
+    image_transport::Subscriber cam_sub_;
+    ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_;
+
+    // Declared in dependency order: members initialize in declaration order,
+    // so the tf2 buffer must exist before the listener that fills it.
+    tf2_ros::Buffer tf2_;
+    tf2_ros::TransformListener tf_listener_;
+    tf2_ros::TransformBroadcaster tf_broadcaster_;
+
+    rclcpp::Publisher<ar_track_alvar_msgs::msg::AlvarMarkers>::SharedPtr arMarkerPub_;
+    rclcpp::Publisher<visualization_msgs::msg::Marker>::SharedPtr rvizMarkerPub_;
+
+
+    MarkerDetector<MarkerData> marker_detector;
+    MultiMarkerBundle **multi_marker_bundles=NULL;
+    Pose *bundlePoses;
+    int *master_id;
+    bool *bundles_seen;
+    std::vector<int> *bundle_indices;
+    bool init = true;
+
+    double marker_size;
+    double max_new_marker_error;
+    double max_track_error;
+    std::string cam_image_topic;
+    std::string cam_info_topic;
+    std::string output_frame;
+    int n_bundles = 0;
+
+  public:
+
+    FindMarkerBundlesNoKinect(int argc, char* argv[]):Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this)
+    {
+      if(argc < 8)
+      {
+        std::cout << std::endl;
+        cout << "Not enough arguments provided." << endl;
+        cout << "Usage: ./findMarkerBundles <marker size in cm> <max new marker error> <max track error> <cam image topic> <cam info topic> <output frame> <list of bundle XML files...>" << endl;
+        std::cout << std::endl;
+        exit(0);
+      }
+
+      // Get params from command line
+      marker_size = atof(argv[1]);
+      max_new_marker_error = atof(argv[2]);
+      max_track_error = atof(argv[3]);
+      cam_image_topic = argv[4];
+      cam_info_topic = argv[5];
+      output_frame = argv[6];
+      int n_args_before_list = 7;
+      n_bundles = argc - n_args_before_list;
+
+      marker_detector.SetMarkerSize(marker_size);
+      multi_marker_bundles = new MultiMarkerBundle*[n_bundles];
+      bundlePoses = new Pose[n_bundles];
+      master_id = new int[n_bundles];
+      bundle_indices = new std::vector<int>[n_bundles];
+      bundles_seen = new bool[n_bundles];
+
+      // Load the marker bundle XML files
+      for(int i=0; i<n_bundles; i++){
+        bundlePoses[i].Reset();
+        MultiMarker loadHelper;
+        if(loadHelper.Load(argv[i + n_args_before_list], FILE_FORMAT_XML)){
+          vector<int> id_vector = loadHelper.getIndices();
+          multi_marker_bundles[i] = new MultiMarkerBundle(id_vector);
+          multi_marker_bundles[i]->Load(argv[i + n_args_before_list], FILE_FORMAT_XML);
+          master_id[i] = multi_marker_bundles[i]->getMasterId();
+          bundle_indices[i] = multi_marker_bundles[i]->getIndices();
+        }
+        else{
+          cout<<"Cannot load file "<< argv[i + n_args_before_list] << endl;
+          exit(0);
+        }
+      }
+
+
+      // Set up camera, listeners, and broadcasters
+      cam = new Camera(cam_info_topic);
+
+      // Publishers
+      rclcpp::Publisher<ar_track_alvar_msgs::msg::AlvarMarkers>::SharedPtr arMarkerPub_;
+      rclcpp::Publisher<visualization_msgs::msg::Marker>::SharedPtr rvizMarkerPub_;
+
+
+      //Give tf a chance to catch up before the camera callback starts asking for transforms
+      //TODO: come back to this, there's probably a better way to do this
+      rclcpp::Rate loop_rate(100);
+      loop_rate.sleep();
+
+
+      //Subscribe to topics and set up callbacks
+      RCLCPP_INFO (this->get_logger(),"Subscribing to image topic");
+      cam_sub_ = image_transport::create_subscription(this, cam_image_topic, [&](auto& msg) { this->getCapCallback(msg); },"raw");

-  //Create the rviz visualization message
-  tf::poseTFToMsg (markerPose, rvizMarker->pose);
-  rvizMarker->header.frame_id = image_msg->header.frame_id;
-  rvizMarker->header.stamp = image_msg->header.stamp;
-  rvizMarker->id = id;
+    }

-  rvizMarker->scale.x = 1.0 * marker_size/100.0;
-  rvizMarker->scale.y = 1.0 * marker_size/100.0;
-  rvizMarker->scale.z = 0.2 * marker_size/100.0;
+    // Updates the bundlePoses of the multi_marker_bundles by detecting markers and using all markers in a bundle to infer the master tag's position
+    void GetMultiMarkerPoses(cv::Mat *image) {
+
+      if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)){
+        for(int i=0; i<n_bundles; i++)
+          multi_marker_bundles[i]->Update(marker_detector.markers, cam, bundlePoses[i]);
+
+        if(marker_detector.DetectAdditional(image, cam, false) > 0){
+          for(int i=0; i<n_bundles; i++){
+            if ((multi_marker_bundles[i]->SetTrackMarkers(marker_detector, cam, bundlePoses[i], image) > 0))
+              multi_marker_bundles[i]->Update(marker_detector.markers, cam, bundlePoses[i]);
+          }
+        }
+      }
+    }

-  if(type==MAIN_MARKER)
-    rvizMarker->ns = "main_shapes";
-  else
-    rvizMarker->ns = "basic_shapes";
+    // Given the pose of a marker, builds the appropriate ROS messages for later publishing
+    void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, geometry_msgs::msg::TransformStamped &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::msg::AlvarMarker *ar_pose_marker){
+      double px,py,pz,qx,qy,qz,qw;
+
+      px = p.translation[0]/100.0;
+      py = p.translation[1]/100.0;
+      pz = p.translation[2]/100.0;
+      qx = p.quaternion[1];
+      qy = p.quaternion[2];
+      qz = p.quaternion[3];
+      qw = p.quaternion[0];
+
+      //Get the marker pose in the camera frame
+      tf2::Quaternion rotation (qx,qy,qz,qw);
+      tf2::Vector3 origin (px,py,pz);
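+      // Alvar reports marker translations in centimeters (hence the /100.0
+      // above to get meters) and stores quaternions as (w, x, y, z), which is
+      // why qw is read from p.quaternion[0].
+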
tf2::Transform t (rotation, origin); //transform from cam to marker + + tf2::Vector3 markerOrigin (0, 0, 0); + tf2::Transform m (tf2::Quaternion::getIdentity (), markerOrigin); + tf2::Transform markerPose = t * m; + + //Publish the cam to marker transform for main marker in each bundle + if(type==MAIN_MARKER){ + std::string markerFrame = "ar_marker_"; + std::stringstream out; + out << id; + std::string id_string = out.str(); + markerFrame += id_string; + geometry_msgs::msg::TransformStamped camToMarker; + camToMarker.header.stamp = image_msg->header.stamp; + camToMarker.header.frame_id = image_msg->header.frame_id; + camToMarker.child_frame_id = markerFrame; + + geometry_msgs::msg::Vector3 trans; + trans.x = px; + trans.y = py; + trans.z = pz; + geometry_msgs::msg::Quaternion rot; + rot.x = qx; + rot.y = qy; + rot.z = qz; + rot.w = qw; + + camToMarker.transform.translation = trans; + camToMarker.transform.rotation = rot; + tf_broadcaster_.sendTransform(camToMarker); + } - rvizMarker->type = visualization_msgs::msg::Marker::CUBE; - rvizMarker->action = visualization_msgs::msg::Marker::ADD; + //Create the rviz visualization message + rvizMarker->pose.position.x = markerPose.getOrigin().getX(); + rvizMarker->pose.position.y = markerPose.getOrigin().getY(); + rvizMarker->pose.position.z = markerPose.getOrigin().getZ(); + rvizMarker->pose.orientation.x = markerPose.getRotation().getX(); + rvizMarker->pose.orientation.y = markerPose.getRotation().getY(); + rvizMarker->pose.orientation.z = markerPose.getRotation().getZ(); + rvizMarker->pose.orientation.w = markerPose.getRotation().getW(); + rvizMarker->header.frame_id = image_msg->header.frame_id; + rvizMarker->header.stamp = image_msg->header.stamp; + rvizMarker->id = id; + + rvizMarker->scale.x = 1.0 * marker_size/100.0; + rvizMarker->scale.y = 1.0 * marker_size/100.0; + rvizMarker->scale.z = 0.2 * marker_size/100.0; + + if(type==MAIN_MARKER) + rvizMarker->ns = "main_shapes"; + else + rvizMarker->ns = "basic_shapes"; + + + rvizMarker->type = visualization_msgs::msg::Marker::CUBE; + rvizMarker->action = visualization_msgs::msg::Marker::ADD; + + //Determine a color and opacity, based on marker type + if(type==MAIN_MARKER){ + rvizMarker->color.r = 1.0f; + rvizMarker->color.g = 0.0f; + rvizMarker->color.b = 0.0f; + rvizMarker->color.a = 1.0; + } + else if(type==VISIBLE_MARKER){ + rvizMarker->color.r = 0.0f; + rvizMarker->color.g = 1.0f; + rvizMarker->color.b = 0.0f; + rvizMarker->color.a = 0.7; + } + else if(type==GHOST_MARKER){ + rvizMarker->color.r = 0.0f; + rvizMarker->color.g = 0.0f; + rvizMarker->color.b = 1.0f; + rvizMarker->color.a = 0.5; + } - //Determine a color and opacity, based on marker type - if(type==MAIN_MARKER){ - rvizMarker->color.r = 1.0f; - rvizMarker->color.g = 0.0f; - rvizMarker->color.b = 0.0f; - rvizMarker->color.a = 1.0; - } - else if(type==VISIBLE_MARKER){ - rvizMarker->color.r = 0.0f; - rvizMarker->color.g = 1.0f; - rvizMarker->color.b = 0.0f; - rvizMarker->color.a = 0.7; - } - else if(type==GHOST_MARKER){ - rvizMarker->color.r = 0.0f; - rvizMarker->color.g = 0.0f; - rvizMarker->color.b = 1.0f; - rvizMarker->color.a = 0.5; - } + rvizMarker->lifetime = rclcpp::Duration (1.0); - rvizMarker->lifetime = rclcpp::Duration (1.0); + // Only publish the pose of the master tag in each bundle, since that's all we really care about aside from visualization + if(type==MAIN_MARKER){ + //Take the pose of the tag in the camera frame and convert to the output frame (usually torso_lift_link for the PR2) + // tf2::Transform tagPoseOutput = 
CamToOutput * markerPose; - // Only publish the pose of the master tag in each bundle, since that's all we really care about aside from visualization - if(type==MAIN_MARKER){ - //Take the pose of the tag in the camera frame and convert to the output frame (usually torso_lift_link for the PR2) - tf::Transform tagPoseOutput = CamToOutput * markerPose; + // //Create the pose marker message + // tf2::poseTFToMsg (tagPoseOutput, ar_pose_marker->pose.pose); - //Create the pose marker message - tf::poseTFToMsg (tagPoseOutput, ar_pose_marker->pose.pose); - ar_pose_marker->header.frame_id = output_frame; - ar_pose_marker->header.stamp = image_msg->header.stamp; - ar_pose_marker->id = id; + tf2::doTransform(ar_pose_marker->pose, ar_pose_marker->pose,CamToOutput); + ar_pose_marker->header.frame_id = output_frame; + ar_pose_marker->header.stamp = image_msg->header.stamp; + ar_pose_marker->id = id; + } + else + ar_pose_marker = NULL; } - else - ar_pose_marker = NULL; -} - -//Callback to handle getting video frames and processing them -void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg) -{ - //If we've already gotten the cam info, then go ahead - if(cam->getCamInfo_){ - try{ - //Get the transformation from the Camera to the output frame for this image capture - tf2::StampedTransform CamToOutput; - try{ - tf_listener->waitForTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); - tf_listener->lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, CamToOutput); + //Callback to handle getting video frames and processing them + void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr & image_msg) + { + //If we've already gotten the cam info, then go ahead + if(cam->getCamInfo_) + { + try + { + //Get the transformation from the Camera to the output frame for this image capture + geometry_msgs::msg::TransformStamped CamToOutput; + try{ + tf2_.canTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); + CamToOutput = tf2_.lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); + } + catch (tf2::TransformException ex){ + RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); + } + + visualization_msgs::msg::Marker rvizMarker; + ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; + arPoseMarkers_.markers.clear (); + + + //Convert the image + cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); + + //Get the estimated pose of the main markers by using all the markers in each bundle + + // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives + // us a cv::Mat. 
I'm too lazy to change to cv::Mat throughout right now, so I + // do this conversion here -jbinney + cv::Mat ipl_image = cv_ptr_->image; + GetMultiMarkerPoses(&ipl_image); + + //Draw the observed markers that are visible and note which bundles have at least 1 marker seen + for(int i=0; isize(); i++) + { + int id = (*(marker_detector.markers))[i].GetId(); + + // Draw if id is valid + if(id >= 0){ + + //Mark the bundle that marker belongs to as "seen" + for(int j=0; jpublish (rvizMarker); + } } - catch (tf::TransformException ex){ - RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); - } - - visualization_msgs::msg::Marker rvizMarker; - ar_track_alvar_msgs::AlvarMarker ar_pose_marker; - arPoseMarkers_.markers.clear (); - - - //Convert the image - cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); - - //Get the estimated pose of the main markers by using all the markers in each bundle - - // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives - // us a cv::Mat. I'm too lazy to change to cv::Mat throughout right now, so I - // do this conversion here -jbinney - IplImage ipl_image = cv_ptr_->image; - GetMultiMarkerPoses(&ipl_image); - - //Draw the observed markers that are visible and note which bundles have at least 1 marker seen - for(int i=0; isize(); i++) - { - int id = (*(marker_detector.markers))[i].GetId(); - - // Draw if id is valid - if(id >= 0){ - - //Mark the bundle that marker belongs to as "seen" - for(int j=0; jencoding.c_str ()); + + //Draw the main markers, whether they are visible or not -- but only if at least 1 marker from their bundle is currently seen + for(int i=0; ipublish (rvizMarker); + arPoseMarkers_.markers.push_back (ar_pose_marker); + } + } + + //Publish the marker messages + arMarkerPub_->publish (arPoseMarkers_); + } + catch (cv_bridge::Exception& e) + { + RCLCPP_ERROR (this->get_logger(),"Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ()); + } } } -} +}; + int main(int argc, char *argv[]) { - ros::init (argc, argv, "marker_detect"); - rclcpp::Node n; - - if(argc < 8){ - std::cout << std::endl; - cout << "Not enough arguments provided." 
<< endl; - cout << "Usage: ./findMarkerBundles " << endl; - std::cout << std::endl; - return 0; - } - - // Get params from command line - marker_size = atof(argv[1]); - max_new_marker_error = atof(argv[2]); - max_track_error = atof(argv[3]); - cam_image_topic = argv[4]; - cam_info_topic = argv[5]; - output_frame = argv[6]; - int n_args_before_list = 7; - n_bundles = argc - n_args_before_list; - - marker_detector.SetMarkerSize(marker_size); - multi_marker_bundles = new MultiMarkerBundle*[n_bundles]; - bundlePoses = new Pose[n_bundles]; - master_id = new int[n_bundles]; - bundle_indices = new std::vector[n_bundles]; - bundles_seen = new bool[n_bundles]; - - // Load the marker bundle XML files - for(int i=0; i id_vector = loadHelper.getIndices(); - multi_marker_bundles[i] = new MultiMarkerBundle(id_vector); - multi_marker_bundles[i]->Load(argv[i + n_args_before_list], FILE_FORMAT_XML); - master_id[i] = multi_marker_bundles[i]->getMasterId(); - bundle_indices[i] = multi_marker_bundles[i]->getIndices(); - } - else{ - cout<<"Cannot load file "<< argv[i + n_args_before_list] << endl; - return 0; - } - } - - // Set up camera, listeners, and broadcasters - cam = new Camera(n, cam_info_topic); - tf_listener = new tf2_ros::TransformListener(n); - tf_broadcaster = new tf::TransformBroadcaster(); - arMarkerPub_ = n.advertise < ar_track_alvar_msgs::AlvarMarkers > ("ar_pose_marker", 0); - rvizMarkerPub_ = n.advertise < visualization_msgs::msg::Marker > ("visualization_marker", 0); - - //Give tf a chance to catch up before the camera callback starts asking for transforms - rclcpp::Duration(1.0).sleep(); - rclcpp::spin_some(node); - - //Subscribe to topics and set up callbacks - ROS_INFO ("Subscribing to image topic"); - image_transport::ImageTransport it_(n); - cam_sub_ = it_.subscribe (cam_image_topic, 1, &getCapCallback); - - rclcpp::spin(node); + rclcpp::init(argc, argv); + rclcpp::spin(std::make_shared(argc,argv)); + rclcpp::shutdown(); return 0; } diff --git a/ar_track_alvar/src/SampleMarkerCreator.cpp b/ar_track_alvar/src/SampleMarkerCreator.cpp index f2b4fb8..b7a51f8 100644 --- a/ar_track_alvar/src/SampleMarkerCreator.cpp +++ b/ar_track_alvar/src/SampleMarkerCreator.cpp @@ -4,7 +4,7 @@ using namespace std; using namespace alvar; struct State { - cv::Mat img; + cv::Mat *img; stringstream filename; double minx, miny, maxx, maxy; // top-left and bottom-right in pixel units MultiMarker multi_marker; @@ -36,7 +36,6 @@ struct State { marker_data_force_strong_hamming(false) {} ~State() { - if (img) cvReleaseImage(&img); } void AddMarker(const char *id) { std::cout<<"ADDING MARKER "< new_maxx) new_maxx = maxx; if (maxy > new_maxy) new_maxy = maxy; - IplImage *new_img = cvCreateImage(cvSize(int(new_maxx-new_minx+0.5), int(new_maxy-new_miny+0.5)), IPL_DEPTH_8U, 1); - cvSet(new_img, cvScalar(255)); - CvRect roi = cvRect(int(minx-new_minx+0.5), int(miny-new_miny+0.5), img->width, img->height); - cvSetImageROI(new_img, roi); - cvCopy(img, new_img); - cvReleaseImage(&img); - img = new_img; + + //TODO I don't know about all this tbh + + cv::Mat new_img = cv::Mat(cv::Size(int(new_maxx-new_minx+0.5), int(new_maxy-new_miny+0.5)),CV_8UC1); + new_img = cv::Scalar(255); + // CvRect roi = cvRect(int(minx-new_minx+0.5), int(miny-new_miny+0.5), img->rows, img->cols); + + cv::Rect roi(int(minx-new_minx+0.5), int(miny-new_miny+0.5), img->rows, img->cols); + cv::Mat image_roi = new_img(roi); + + + CvMat img2 = cvMat(image_roi); + + + + // cvSetImageROI(new_img, roi); + cvCopy(img, &img2); + + + roi.x = int((posx*units) - 
(marker_side_len*units/2.0) - new_minx + 0.5); roi.y = int((posy*units) - (marker_side_len*units/2.0) - new_miny + 0.5); roi.width = int(marker_side_len*units+0.5); roi.height = int(marker_side_len*units+0.5); - cvSetImageROI(img, roi); + + cv::Mat im2 = img->clone(); + cv::Mat image_roi2 = im2(roi); + CvMat img3 = cvMat(image_roi2); + cv::Mat temp_img = cv::cvarrToMat(&img3); + img = &temp_img; + minx = new_minx; miny = new_miny; maxx = new_maxx; maxy = new_maxy; } @@ -96,14 +115,14 @@ struct State { } } md.ScaleMarkerToImage(img); - cvResetImageROI(img); + // cvResetImageROI(img); } else if (marker_type == 1) { // Create and save MarkerArtoolkit marker (Note, that this doesn't support multi markers) MarkerArtoolkit md(marker_side_len, content_res, margin_res); int side_len = int(marker_side_len*units+0.5); - if (img != 0) cvReleaseImage(&img); - img = cvCreateImage(cvSize(side_len, side_len), IPL_DEPTH_8U, 1); + cv::Mat temp_img = cv::Mat(cv::Size(side_len, side_len),CV_8UC1); + img = &temp_img; filename.str(""); filename<<"MarkerArtoolkit"; md.SetContent(atoi(id)); @@ -117,7 +136,7 @@ struct State { filenamexml< 1) { std::cout<<"Saving: "< Date: Tue, 27 Jul 2021 06:28:18 -0500 Subject: [PATCH 10/27] temporary fix for cmake dev warnings --- ar_track_alvar/CMakeLists.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 360c6ce..8138ce1 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -19,6 +19,10 @@ if(OpenCV_VERSION VERSION_GREATER 4.5.0) target_compile_definitions(library PUBLIC USE_LEGACY_TRACKING=1) endif() +if(NOT DEFINED CMAKE_SUPPRESS_DEVELOPER_WARNINGS) + set(CMAKE_SUPPRESS_DEVELOPER_WARNINGS 1 CACHE INTERNAL "No dev warnings") +endif() + find_package(rclcpp REQUIRED) @@ -210,5 +214,3 @@ ament_package() include(CheckCXXCompilerFlag) CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) - -cmake_policy(SET CMP0046 OLD) From 35f4e5c792ff08ade19c5f0b3b5a87c37a5bdfff Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Tue, 27 Jul 2021 14:47:06 -0500 Subject: [PATCH 11/27] type mismatch fix, and beginning unit tests --- ar_track_alvar/CMakeLists.txt | 68 ++++++++----------- .../include/ar_track_alvar/Camera.h | 1 + ar_track_alvar/package.xml | 2 + ar_track_alvar/src/Marker.cpp | 15 ++-- ar_track_alvar/src/SampleMarkerCreator.cpp | 17 +++-- ar_track_alvar/test/CMakeLists.txt | 7 ++ 6 files changed, 61 insertions(+), 49 deletions(-) create mode 100644 ar_track_alvar/test/CMakeLists.txt diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 8138ce1..9098503 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -1,15 +1,6 @@ cmake_minimum_required(VERSION 3.5) project(ar_track_alvar) -# if(NOT WIN32) -# if(NOT CMAKE_CXX_STANDARD) -# set(CMAKE_CXX_STANDARD 14) -# endif() -# if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") -# add_compile_options(-Wall -Wextra -Wpedantic) -# endif() -# endif() - find_package(ament_cmake REQUIRED) find_package(ament_cmake_auto REQUIRED) ament_auto_find_build_dependencies() @@ -34,12 +25,13 @@ find_package(cv_bridge REQUIRED) find_package(perception_pcl REQUIRED) find_package(pcl_ros) find_package(pcl_conversions REQUIRED) -find_package(ar_track_alvar_msgs) -find_package(std_msgs) -find_package(sensor_msgs) +find_package(ar_track_alvar_msgs REQUIRED) +find_package(std_msgs REQUIRED) 
+find_package(sensor_msgs REQUIRED) find_package(tf2_geometry_msgs REQUIRED) -find_package(geometry_msgs) -find_package(visualization_msgs) +find_package(geometry_msgs REQUIRED) +find_package(rosbag2_bag_v2_plugins REQUIRED) +find_package(visualization_msgs REQUIRED) find_package(PCL REQUIRED QUIET COMPONENTS common io) find_package(Eigen3 REQUIRED) @@ -48,15 +40,10 @@ find_package(TinyXML2 REQUIRED) include_directories(include) -#set(GENCPP_DEPS ar_track_alvar_msgs_gencpp std_msgs_gencpp sensor_msgs_gencpp geometry_msgs_gencpp visualization_msgs_gencpp) - # Kinect filtering code set(KINECT_FILTERING_TARGETS kinect_filtering medianFilter) set(ALVAR_TARGETS ar_track_alvar individualMarkers individualMarkersNoKinect trainMarkerBundle findMarkerBundles findMarkerBundlesNoKinect createMarker ar_track_alvar) -# dynamic reconfigure support -#generate_dynamic_reconfigure_options(cfg/Params.cfg) - set(dependencies OpenCV tf2_ros @@ -159,29 +146,32 @@ ament_export_libraries(ar_track_alvar) ament_export_dependencies(OpenCV ar_track_alvar_msgs std_msgs rclcpp tf2_ros tf2 message_runtime image_transport sensor_msgs geometry_msgs visualization_msgs resource_retriever cv_bridge perception_pcl pcl_conversions) ament_package() -# install(TARGETS ${ALVAR_TARGETS} ${KINECT_FILTERING_TARGETS} -# ARCHIVE DESTINATION lib -# LIBRARY DESTINATION lib -# RUNTIME DESTINATION lib/${PROJECT_NAME} -# ) - -# install(DIRECTORY include/${PROJECT_NAME}/ -# DESTINATION include/${PROJECT_NAME} -# ) - -# install(DIRECTORY launch bundles -# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/ -# ) -# if(COMPILER_SUPPORTS_CXX11) -# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") -# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") -# elseif(COMPILER_SUPPORTS_CXX0X) -# else() -# message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") -# endif() +install(TARGETS ${ALVAR_TARGETS} ${KINECT_FILTERING_TARGETS} + ARCHIVE DESTINATION lib + LIBRARY DESTINATION lib + RUNTIME DESTINATION lib/${PROJECT_NAME} +) +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION include/${PROJECT_NAME} +) + +install(DIRECTORY launch bundles + DESTINATION share/${PROJECT_NAME} +) + +if(BUILD_TESTING) + find_package(ament_cmake_gtest REQUIRED) + find_package(ament_download) + ament_download(http://download.ros.org/data/ar_track_alvar/ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag + FILENAME ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag + DESTINATION ${CMAKE_INSTALL_PREFIX} + MD5 627aa0316bbfe4334e06023d7c2b4087 + ) + #ament_add_gtest(test) +endif() #TODO fix these tests # if(BUILD_TESTING) # add_subdirectory(test) diff --git a/ar_track_alvar/include/ar_track_alvar/Camera.h b/ar_track_alvar/include/ar_track_alvar/Camera.h index 64f6e93..b1f5805 100644 --- a/ar_track_alvar/include/ar_track_alvar/Camera.h +++ b/ar_track_alvar/include/ar_track_alvar/Camera.h @@ -48,6 +48,7 @@ #include #include #include "opencv2/imgproc/types_c.h" +#include "opencv2/opencv.hpp" namespace alvar { diff --git a/ar_track_alvar/package.xml b/ar_track_alvar/package.xml index e56a1df..2b3268d 100644 --- a/ar_track_alvar/package.xml +++ b/ar_track_alvar/package.xml @@ -23,6 +23,8 @@ This package is a ROS wrapper for Alvar, an open source AR tag tracking library. 
sensor_msgs std_msgs tf2 +ament_download +rosbag2_bag_v2_plugins tf2_ros tf2_geometry_msgs tinyxml2 diff --git a/ar_track_alvar/src/Marker.cpp b/ar_track_alvar/src/Marker.cpp index 31e72e5..62bef81 100644 --- a/ar_track_alvar/src/Marker.cpp +++ b/ar_track_alvar/src/Marker.cpp @@ -189,7 +189,7 @@ bool Marker::UpdateContent(std::vector &_marker_corners_img, cv::Ma } bool Marker::UpdateContentBasic(std::vector &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no /*= 0*/) { - vector marker_corners_img_undist; + vector marker_corners_img_undist; marker_corners_img_undist.resize(_marker_corners_img.size()); copy(_marker_corners_img.begin(), _marker_corners_img.end(), marker_corners_img_undist.begin()); @@ -267,6 +267,7 @@ bool Marker::UpdateContentBasic(std::vector &_marker_corners_img, c // Threshold the marker content + std::cout << "potential problem 1" << std::endl; cv::Mat temp = cv::cvarrToMat(marker_content); cv::threshold(temp, temp, (max+min)/2.0, 255, cv::THRESH_BINARY); @@ -349,6 +350,7 @@ void Marker::SaveMarkerImage(const char *filename, int save_res) const { cvZero(img); CvMat submat; cvGetSubRect(img, &submat, cvRect(int(margin*scale), int(margin*scale), int(res*scale), int(res*scale))); + std::cout << "potential problem 2" << std::endl; cv::resize(cv::cvarrToMat(marker_content), *img_content,img_content->size(), cv::INTER_NEAREST); cvCopy(img_content, &submat); cv::imwrite(filename, *img); @@ -366,12 +368,15 @@ void Marker::ScaleMarkerToImage(cv::Mat *image) const { - - cvZero(img); + img->setTo(cv::Scalar::all(0)); CvMat submat; - cvGetSubRect(img, &submat, cvRect(int(multiplier*margin+0.5), int(multiplier*margin+0.5), int(multiplier*res+0.5), int(multiplier*res+0.5))); + CvMat temp3 = cvMat(*img); + cvGetSubRect(&temp3, &submat, cvRect(int(multiplier*margin+0.5), int(multiplier*margin+0.5), int(multiplier*res+0.5), int(multiplier*res+0.5))); cv::resize(cv::cvarrToMat(marker_content), *img_content, img_content->size(), cv::INTER_NEAREST); - cvCopy(img_content, &submat); + + CvMat temp4 = cvMat(*img_content); + cvCopy(&temp4, &submat); + cv::resize(*img, *image,image->size(), cv::INTER_NEAREST); // cvReleaseImage(&img_content); // cvReleaseImage(&img); diff --git a/ar_track_alvar/src/SampleMarkerCreator.cpp b/ar_track_alvar/src/SampleMarkerCreator.cpp index b7a51f8..a773e2a 100644 --- a/ar_track_alvar/src/SampleMarkerCreator.cpp +++ b/ar_track_alvar/src/SampleMarkerCreator.cpp @@ -1,5 +1,6 @@ #include "ar_track_alvar/MultiMarker.h" #include +#include "opencv2/opencv.hpp" using namespace std; using namespace alvar; @@ -51,7 +52,10 @@ struct State { miny = (posy*units) - (marker_side_len*units/2.0); maxx = (posx*units) + (marker_side_len*units/2.0); maxy = (posy*units) + (marker_side_len*units/2.0); - } else { + } + + else + { double new_minx = (posx*units) - (marker_side_len*units/2.0); double new_miny = (posy*units) - (marker_side_len*units/2.0); double new_maxx = (posx*units) + (marker_side_len*units/2.0); @@ -83,16 +87,18 @@ struct State { roi.x = int((posx*units) - (marker_side_len*units/2.0) - new_minx + 0.5); roi.y = int((posy*units) - (marker_side_len*units/2.0) - new_miny + 0.5); roi.width = int(marker_side_len*units+0.5); roi.height = int(marker_side_len*units+0.5); - + cv::Mat im2 = img->clone(); cv::Mat image_roi2 = im2(roi); CvMat img3 = cvMat(image_roi2); + cv::Mat temp_img = cv::cvarrToMat(&img3); img = &temp_img; minx = new_minx; miny = new_miny; maxx = new_maxx; maxy = new_maxy; } + if (marker_data_content_type == MarkerData::MARKER_CONTENT_TYPE_NUMBER) { 
int idi = atoi(id); md.SetContent(marker_data_content_type, idi, 0); @@ -102,7 +108,9 @@ struct State { pose.Reset(); pose.SetTranslation(posx, -posy, 0); multi_marker.PointCloudAdd(idi, marker_side_len, pose); - } else { + } + else + { md.SetContent(marker_data_content_type, 0, id); const char *p = id; int counter=0; @@ -112,10 +120,9 @@ struct State { else filename<<(char)tolower(*p); p++; counter++; if (counter > 8) break; - } } + } md.ScaleMarkerToImage(img); - // cvResetImageROI(img); } else if (marker_type == 1) { // Create and save MarkerArtoolkit marker (Note, that this doesn't support multi markers) diff --git a/ar_track_alvar/test/CMakeLists.txt b/ar_track_alvar/test/CMakeLists.txt new file mode 100644 index 0000000..d61a1c4 --- /dev/null +++ b/ar_track_alvar/test/CMakeLists.txt @@ -0,0 +1,7 @@ +find_package(ros_test REQUIRED) + +# add_rostest(test/marker_arg_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) +# add_rostest(test/marker_arg_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) +# add_rostest(test/marker_param_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) +# add_rostest(test/marker_param_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) + \ No newline at end of file From 145691df56574d1070be867d09c3fbf954947b3a Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Tue, 27 Jul 2021 17:27:45 -0500 Subject: [PATCH 12/27] well then --- ar_track_alvar/CMakeLists.txt | 28 ++++++++-- ar_track_alvar/package.xml | 15 ++++-- ar_track_alvar/test/CMakeLists.txt | 7 --- ar_track_alvar/test/test_ar_track_alvar.py | 63 ++++++++++++++++++++++ 4 files changed, 99 insertions(+), 14 deletions(-) delete mode 100644 ar_track_alvar/test/CMakeLists.txt create mode 100644 ar_track_alvar/test/test_ar_track_alvar.py diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 9098503..84527ad 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -14,7 +14,8 @@ if(NOT DEFINED CMAKE_SUPPRESS_DEVELOPER_WARNINGS) set(CMAKE_SUPPRESS_DEVELOPER_WARNINGS 1 CACHE INTERNAL "No dev warnings") endif() - +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() find_package(rclcpp REQUIRED) find_package(tf2_ros REQUIRED) @@ -144,7 +145,7 @@ ament_target_dependencies(createMarker ${dependencies}) ament_export_include_directories(include) ament_export_libraries(ar_track_alvar) ament_export_dependencies(OpenCV ar_track_alvar_msgs std_msgs rclcpp tf2_ros tf2 message_runtime image_transport sensor_msgs geometry_msgs visualization_msgs resource_retriever cv_bridge perception_pcl pcl_conversions) -ament_package() + install(TARGETS ${ALVAR_TARGETS} ${KINECT_FILTERING_TARGETS} ARCHIVE DESTINATION lib @@ -163,13 +164,30 @@ install(DIRECTORY launch bundles if(BUILD_TESTING) - find_package(ament_cmake_gtest REQUIRED) - find_package(ament_download) + + find_package(ament_lint_auto REQUIRED) + ament_lint_auto_find_test_dependencies() + # find_package(ament_cmake_gtest REQUIRED) + # find_package(ament_download) + + + ament_download(http://download.ros.org/data/ar_track_alvar/ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag FILENAME ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag DESTINATION ${CMAKE_INSTALL_PREFIX} MD5 627aa0316bbfe4334e06023d7c2b4087 ) + + set(_PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}") + if(WIN32 AND CMAKE_BUILD_TYPE STREQUAL "Debug") + set(PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE_DEBUG}") + endif() + + + # Test DisparityNode in launch test + 
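# add_launch_test() is provided by launch_testing_ament_cmake (declared as a
+  # test dependency in package.xml); it registers the Python launch file below
+  # as a CTest case that runs under `colcon test`.
+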
add_launch_test("test/test_ar_track_alvar.py" + PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}" + ) #ament_add_gtest(test) endif() #TODO fix these tests @@ -204,3 +222,5 @@ endif() include(CheckCXXCompilerFlag) CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) + +ament_package() \ No newline at end of file diff --git a/ar_track_alvar/package.xml b/ar_track_alvar/package.xml index 2b3268d..092b9a7 100644 --- a/ar_track_alvar/package.xml +++ b/ar_track_alvar/package.xml @@ -32,11 +32,20 @@ This package is a ROS wrapper for Alvar, an open source AR tag tracking library. libopencv-dev eigen libpcl-all-dev - -ament_cmake_gtest -rosbag2 + ament_cmake_pytest + ament_lint_auto + ament_lint_common + launch + launch_ros + launch_testing + launch_testing_ament_cmake + python3-opencv + python_cmake_module + rclpy + ros_testing + rosbag2 ament_cmake diff --git a/ar_track_alvar/test/CMakeLists.txt b/ar_track_alvar/test/CMakeLists.txt deleted file mode 100644 index d61a1c4..0000000 --- a/ar_track_alvar/test/CMakeLists.txt +++ /dev/null @@ -1,7 +0,0 @@ -find_package(ros_test REQUIRED) - -# add_rostest(test/marker_arg_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) -# add_rostest(test/marker_arg_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) -# add_rostest(test/marker_param_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) -# add_rostest(test/marker_param_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag) - \ No newline at end of file diff --git a/ar_track_alvar/test/test_ar_track_alvar.py b/ar_track_alvar/test/test_ar_track_alvar.py new file mode 100644 index 0000000..666b8d2 --- /dev/null +++ b/ar_track_alvar/test/test_ar_track_alvar.py @@ -0,0 +1,63 @@ +import os +import sys +import time +import unittest + +from ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch_ros.actions import Node +import launch_testing + +import pytest +import rclpy + +from stereo_msgs.msg import DisparityImage +from sensor_msgs.msg import Image + + +bag_name = os.path.join(get_package_share_directory('ar_track_alvar'),'rosbag_v2 ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag') + +@pytest.mark.rostest +def generate_test_description(): + + return LaunchDescription([ + # DisparityNode + Node( + package='rosbag2', + executable='bag', + name='bag_node', + parameters=["play","-s","rosbag_v2",bag_name], + output='screen' + ), + launch_testing.actions.ReadyToTest(), + ]) + +class TestArTrack(unittest.TestCase): + @classmethod + def setUpClass(cls): + rclpy.init() + cls.node = rclpy.create_node('test_ar_track_alvar_node') + + @classmethod + def tearDownClass(cls): + cls.node.destroy_node() + rclpy.shutdown() + + + def test_message_received(self): + # Expect the disparity node to publish on '/disparity' topic + msgs_received = [] + self.node.create_subscription( + Image, + '/camera/image_raw', + lambda msg: msgs_received.append(msg), + 1 + ) + + # Wait up to 60 seconds to receive message + start_time = time.time() + while len(msgs_received) == 0 and (time.time() - start_time) < 60: + rclpy.spin_once(self.node, timeout_sec=0.1) + + assert len(msgs_received) > 0 + From 83657f68b519f1ac28ac99fa798099005c27ba6c Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Wed, 28 Jul 2021 07:00:37 -0500 Subject: [PATCH 13/27] playing from bag works --- README | 14 ++++++++++++++ ar_track_alvar/CMakeLists.txt | 5 ++--- ar_track_alvar/package.xml | 3 +-- 
ar_track_alvar/test/test_ar_track_alvar.py | 20 ++++++++++++++------ 4 files changed, 31 insertions(+), 11 deletions(-) diff --git a/README b/README index 60a5efb..aaf0091 100644 --- a/README +++ b/README @@ -1,3 +1,17 @@ See [ROS wiki](http://wiki.ros.org/ar_track_alvar) for the users document. +### Testing (Build from Source) +The testing suite for this package makes use of a ros1 bag, and depends on the following [repo](https://github.com/ros2/rosbag2_bag_v2). In order to get it to work +you need to source both the ros1 underlay and the ros2 underlay in your ~/.bashrc file. Do this in a new terminal. + +``` +# ROS1 +source /opt/ros/noetic/setup.bash + +# ROS2 +source /opt/ros/foxy/setup.bash +source /usr/share/colcon_cd/function/colcon_cd.sh +export _colcon_cd_root=/opt/ros/foxy + +``` diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 84527ad..c72e0dd 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -167,14 +167,13 @@ if(BUILD_TESTING) find_package(ament_lint_auto REQUIRED) ament_lint_auto_find_test_dependencies() - # find_package(ament_cmake_gtest REQUIRED) - # find_package(ament_download) + find_package(ament_download) ament_download(http://download.ros.org/data/ar_track_alvar/ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag FILENAME ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag - DESTINATION ${CMAKE_INSTALL_PREFIX} + DESTINATION ${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME} MD5 627aa0316bbfe4334e06023d7c2b4087 ) diff --git a/ar_track_alvar/package.xml b/ar_track_alvar/package.xml index 092b9a7..c2b7128 100644 --- a/ar_track_alvar/package.xml +++ b/ar_track_alvar/package.xml @@ -34,9 +34,8 @@ This package is a ROS wrapper for Alvar, an open source AR tag tracking library. 
libpcl-all-dev - ament_cmake_pytest + ament_cmake_pytest ament_lint_auto - ament_lint_common launch launch_ros launch_testing diff --git a/ar_track_alvar/test/test_ar_track_alvar.py b/ar_track_alvar/test/test_ar_track_alvar.py index 666b8d2..d979547 100644 --- a/ar_track_alvar/test/test_ar_track_alvar.py +++ b/ar_track_alvar/test/test_ar_track_alvar.py @@ -7,6 +7,7 @@ from launch import LaunchDescription from launch_ros.actions import Node import launch_testing +import launch import pytest import rclpy @@ -15,20 +16,27 @@ from sensor_msgs.msg import Image -bag_name = os.path.join(get_package_share_directory('ar_track_alvar'),'rosbag_v2 ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag') +bag_name = os.path.join(get_package_share_directory('ar_track_alvar'),'ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag') @pytest.mark.rostest def generate_test_description(): return LaunchDescription([ # DisparityNode - Node( - package='rosbag2', - executable='bag', - name='bag_node', - parameters=["play","-s","rosbag_v2",bag_name], + + + launch.actions.ExecuteProcess( + cmd=['ros2', 'bag', 'play', '-s',"rosbag_v2",bag_name], output='screen' ), + + # Node( + # package='ros2bag', + # executable='bag', + # name='bag_node', + # arguments=["play","-s","rosbag_v2",bag_name], + # output='screen' + # ), launch_testing.actions.ReadyToTest(), ]) From 93e880a4aa12d4ba7051f9ec8cadab22849f7b60 Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Wed, 28 Jul 2021 07:45:37 -0500 Subject: [PATCH 14/27] Individual marker test --- ar_track_alvar/CMakeLists.txt | 12 +-- ...ck_alvar.py => test_ar_track_alvar_bag.py} | 16 +--- .../test_ar_track_alvar_individual_markers.py | 84 +++++++++++++++++++ 3 files changed, 94 insertions(+), 18 deletions(-) rename ar_track_alvar/test/{test_ar_track_alvar.py => test_ar_track_alvar_bag.py} (84%) create mode 100644 ar_track_alvar/test/test_ar_track_alvar_individual_markers.py diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index c72e0dd..581598f 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -169,8 +169,6 @@ if(BUILD_TESTING) ament_lint_auto_find_test_dependencies() find_package(ament_download) - - ament_download(http://download.ros.org/data/ar_track_alvar/ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag FILENAME ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag DESTINATION ${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME} @@ -182,12 +180,16 @@ if(BUILD_TESTING) set(PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE_DEBUG}") endif() + # Test bag playing (if this fails all other tests will fail) + add_launch_test("test/test_ar_track_alvar_bag.py" + PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}" + ) - # Test DisparityNode in launch test - add_launch_test("test/test_ar_track_alvar.py" + # Test Individual Markers in launch test + add_launch_test("test/test_ar_track_alvar_individual_markers.py" PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}" ) - #ament_add_gtest(test) + endif() #TODO fix these tests # if(BUILD_TESTING) diff --git a/ar_track_alvar/test/test_ar_track_alvar.py b/ar_track_alvar/test/test_ar_track_alvar_bag.py similarity index 84% rename from ar_track_alvar/test/test_ar_track_alvar.py rename to ar_track_alvar/test/test_ar_track_alvar_bag.py index d979547..ea5d517 100644 --- a/ar_track_alvar/test/test_ar_track_alvar.py +++ b/ar_track_alvar/test/test_ar_track_alvar_bag.py @@ -12,31 +12,21 @@ import pytest import rclpy -from stereo_msgs.msg import DisparityImage from sensor_msgs.msg import Image - +# Test Parameters bag_name = 
os.path.join(get_package_share_directory('ar_track_alvar'),'ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag') @pytest.mark.rostest def generate_test_description(): return LaunchDescription([ - # DisparityNode - - + + # Launch Bag launch.actions.ExecuteProcess( cmd=['ros2', 'bag', 'play', '-s',"rosbag_v2",bag_name], output='screen' ), - - # Node( - # package='ros2bag', - # executable='bag', - # name='bag_node', - # arguments=["play","-s","rosbag_v2",bag_name], - # output='screen' - # ), launch_testing.actions.ReadyToTest(), ]) diff --git a/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py b/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py new file mode 100644 index 0000000..6bf4ecb --- /dev/null +++ b/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py @@ -0,0 +1,84 @@ +import os +import sys +import time +import unittest + +from ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch_ros.actions import Node +import launch_testing +import launch + +import pytest +import rclpy + +from stereo_msgs.msg import DisparityImage +from sensor_msgs.msg import Image +from ar_track_alvar_msgs.msg import AlvarMarkers + +# Test Parameters +bag_name = os.path.join(get_package_share_directory('ar_track_alvar'),'ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag') +cam_image_topic ="/camera/image_raw" +cam_info_topic = "/camera/camera_info" +marker_margin="2" +marker_resolution="5" +marker_size="2.3" +max_new_marker_error="0.08" +max_frequency="100" +max_track_error="0.2" +output_frame="/camera" + +@pytest.mark.rostest +def generate_test_description(): + + return LaunchDescription([ + # DisparityNode + + launch.actions.ExecuteProcess( + cmd=['ros2', 'bag', 'play', '-s',"rosbag_v2",bag_name], + output='screen' + ), + + Node( + package='ar_track_alvar', + executable='individualMarkers', + name='invidual_markers', + remappings=[ + ("camera_image", cam_image_topic), + ("camera_info",cam_info_topic) + ], + arguments=[marker_size, max_new_marker_error, max_track_error, cam_image_topic, cam_info_topic, output_frame], + output='screen' + ), + launch_testing.actions.ReadyToTest(), + ]) + +class TestArTrack(unittest.TestCase): + @classmethod + def setUpClass(cls): + rclpy.init() + cls.node = rclpy.create_node('test_ar_track_alvar_node') + + @classmethod + def tearDownClass(cls): + cls.node.destroy_node() + rclpy.shutdown() + + + def test_message_received(self): + # Expect the disparity node to publish on '/disparity' topic + msgs_received = [] + self.node.create_subscription( + AlvarMarkers, + "ar_pose_marker", + lambda msg: msgs_received.append(msg), + 1 + ) + + # Wait up to 60 seconds to receive message + start_time = time.time() + while len(msgs_received) == 0 and (time.time() - start_time) < 60: + rclpy.spin_once(self.node, timeout_sec=0.1) + + assert len(msgs_received) > 0 + From d86804b5aa3ca92443229c2a63cfe67c9403c2e9 Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Wed, 28 Jul 2021 14:52:53 -0500 Subject: [PATCH 15/27] publishes at least --- ar_track_alvar/CMakeLists.txt | 2 +- ar_track_alvar/nodes/.vscode/settings.json | 5 +- ar_track_alvar/nodes/FindMarkerBundles.cpp | 16 +++++- .../nodes/FindMarkerBundlesNoKinect.cpp | 18 +++++- ar_track_alvar/nodes/IndividualMarkers.cpp | 16 +++++- .../nodes/IndividualMarkersNoKinect.cpp | 19 ++++++- ar_track_alvar/nodes/TrainMarkerBundle.cpp | 18 +++++- ar_track_alvar/src/Camera.cpp | 55 ++++++++----------- .../test/marker_arg_config_basic.py | 20 +++++++ 
.../test_ar_track_alvar_individual_markers.py | 6 +- 10 files changed, 134 insertions(+), 41 deletions(-) create mode 100644 ar_track_alvar/test/marker_arg_config_basic.py diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 581598f..ee55a6b 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -43,7 +43,7 @@ include_directories(include) # Kinect filtering code set(KINECT_FILTERING_TARGETS kinect_filtering medianFilter) -set(ALVAR_TARGETS ar_track_alvar individualMarkers individualMarkersNoKinect trainMarkerBundle findMarkerBundles findMarkerBundlesNoKinect createMarker ar_track_alvar) +set(ALVAR_TARGETS ar_track_alvar individualMarkers individualMarkersNoKinect trainMarkerBundle findMarkerBundles findMarkerBundlesNoKinect createMarker) set(dependencies OpenCV diff --git a/ar_track_alvar/nodes/.vscode/settings.json b/ar_track_alvar/nodes/.vscode/settings.json index 411983b..c7a8172 100644 --- a/ar_track_alvar/nodes/.vscode/settings.json +++ b/ar_track_alvar/nodes/.vscode/settings.json @@ -70,5 +70,8 @@ "typeindex": "cpp", "typeinfo": "cpp", "variant": "cpp" - } + }, + "python.analysis.extraPaths": [ + "/opt/ros/foxy/lib/python3.8/site-packages" + ] } \ No newline at end of file diff --git a/ar_track_alvar/nodes/FindMarkerBundles.cpp b/ar_track_alvar/nodes/FindMarkerBundles.cpp index 91222c9..b7d6cb5 100644 --- a/ar_track_alvar/nodes/FindMarkerBundles.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundles.cpp @@ -100,6 +100,7 @@ class FindMarkerBundles : public rclcpp::Node // Subscribers rclcpp::Subscription::SharedPtr cloud_sub_; + rclcpp::Subscription::SharedPtr info_sub_; tf2_ros::TransformBroadcaster tf_broadcaster_; @@ -163,7 +164,7 @@ class FindMarkerBundles : public rclcpp::Node // Set up camera, listeners, and broadcasters - cam = new Camera(cam_info_topic); + cam = new Camera(); arMarkerPub_ = this->create_publisher ("ar_pose_marker", 0); rvizMarkerPub_ = this->create_publisher ("visualization_marker", 0); rvizMarkerPub2_ = this->create_publisher ("ARmarker_points", 0); @@ -204,9 +205,22 @@ class FindMarkerBundles : public rclcpp::Node cloud_sub_ = this->create_subscription(cam_image_topic, 1, std::bind(&FindMarkerBundles::getPointCloudCallback, this, std::placeholders::_1)); + RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); + info_sub_ = this->create_subscription(cam_info_topic, 1, std::bind(&FindMarkerBundles::InfoCallback, this, std::placeholders::_1)); } + void InfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) + { + RCLCPP_INFO(this->get_logger(),"this executed"); + if (!cam->getCamInfo_) + { + cam->SetCameraInfo(cam_info); + cam->getCamInfo_ = true; + //sub_.reset(); + } + } + //Debugging utility function void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double rad) { diff --git a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp index 9362ea8..0cfeef6 100644 --- a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp @@ -74,6 +74,7 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node rclcpp::Publisher::SharedPtr arMarkerPub_; rclcpp::Publisher::SharedPtr rvizMarkerPub_; + rclcpp::Subscription::SharedPtr info_sub_; MarkerDetector marker_detector; @@ -142,7 +143,7 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node // Set up camera, listeners, and broadcasters - cam = new Camera(cam_info_topic); + cam = new Camera(); // Publishers rclcpp::Publisher::SharedPtr 
arMarkerPub_; @@ -159,6 +160,10 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node RCLCPP_INFO (this->get_logger(),"Subscribing to image topic"); cam_sub_ = image_transport::create_subscription(this, cam_image_topic, [&](auto& msg) { this->getCapCallback(msg); },"raw"); + RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); + info_sub_ = this->create_subscription(cam_info_topic, 1, std::bind(&FindMarkerBundlesNoKinect::InfoCallback, this, std::placeholders::_1)); + + } // Updates the bundlePoses of the multi_marker_bundles by detecting markers and using all markers in a bundle to infer the master tag's position @@ -177,6 +182,17 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node } } + void InfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) + { + RCLCPP_INFO(this->get_logger(),"this executed"); + if (!cam->getCamInfo_) + { + cam->SetCameraInfo(cam_info); + cam->getCamInfo_ = true; + //sub_.reset(); + } + } + // Given the pose of a marker, builds the appropriate ROS messages for later publishing void makeMarkerMsgs(int type, int id, Pose &p, sensor_msgs::msg::Image::ConstSharedPtr image_msg, geometry_msgs::msg::TransformStamped &CamToOutput, visualization_msgs::msg::Marker *rvizMarker, ar_track_alvar_msgs::msg::AlvarMarker *ar_pose_marker){ diff --git a/ar_track_alvar/nodes/IndividualMarkers.cpp b/ar_track_alvar/nodes/IndividualMarkers.cpp index 5869183..733e051 100644 --- a/ar_track_alvar/nodes/IndividualMarkers.cpp +++ b/ar_track_alvar/nodes/IndividualMarkers.cpp @@ -74,6 +74,7 @@ class IndividualMarkers : public rclcpp::Node rclcpp::Publisher::SharedPtr arMarkerPub_; rclcpp::Publisher::SharedPtr rvizMarkerPub_; rclcpp::Publisher::SharedPtr rvizMarkerPub2_; + rclcpp::Subscription::SharedPtr info_sub_; tf2_ros::TransformBroadcaster tf_broadcaster_; tf2_ros::TransformListener tf_listener_; @@ -167,7 +168,7 @@ class IndividualMarkers : public rclcpp::Node } marker_detector.SetMarkerSize(marker_size, marker_resolution, marker_margin); - cam = new Camera(cam_info_topic); + cam = new Camera(); arMarkerPub_ = this->create_publisher ("ar_pose_marker", 0); @@ -189,8 +190,21 @@ class IndividualMarkers : public rclcpp::Node cloud_sub_ = this->create_subscription(cam_image_topic, 1, std::bind(&IndividualMarkers::getPointCloudCallback, this, std::placeholders::_1)); } + + RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); + info_sub_ = this->create_subscription(cam_info_topic, 1, std::bind(&IndividualMarkers::InfoCallback, this, std::placeholders::_1)); } + void InfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) + { + RCLCPP_INFO(this->get_logger(),"this executed"); + if (!cam->getCamInfo_) + { + cam->SetCameraInfo(cam_info); + cam->getCamInfo_ = true; + //sub_.reset(); + } + } //Debugging utility function void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double rad) diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index 0d1cc93..0c13491 100644 --- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -69,6 +69,7 @@ class IndividualMarkersNoKinect : public rclcpp::Node rclcpp::Subscription::SharedPtr cam_sub_; rclcpp::Publisher::SharedPtr arMarkerPub_; rclcpp::Publisher::SharedPtr rvizMarkerPub_; + rclcpp::Subscription::SharedPtr info_sub_; tf2_ros::TransformBroadcaster tf_broadcaster_; tf2_ros::TransformListener tf_listener_; @@ -164,7 +165,7 @@ class IndividualMarkersNoKinect : public 
rclcpp::Node } marker_detector.SetMarkerSize(marker_size, marker_resolution, marker_margin); - cam = new Camera(cam_info_topic); + cam = new Camera(); //Give tf a chance to catch up before the camera callback starts asking for transforms // It will also reconfigure parameters for the first time, setting the default values @@ -182,6 +183,9 @@ class IndividualMarkersNoKinect : public rclcpp::Node enable_sub_ = this->create_subscription("enable_detection", 1, std::bind(&IndividualMarkersNoKinect::enableCallback, this, std::placeholders::_1)); + RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); + info_sub_ = this->create_subscription(cam_info_topic, 1, std::bind(&IndividualMarkersNoKinect::InfoCallback, this, std::placeholders::_1)); + } void enableCallback(const std_msgs::msg::Bool::SharedPtr msg) @@ -190,6 +194,17 @@ class IndividualMarkersNoKinect : public rclcpp::Node enabled = msg->data; } + void InfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) + { + RCLCPP_INFO(this->get_logger(),"this executed"); + if (!cam->getCamInfo_) + { + cam->SetCameraInfo(cam_info); + cam->getCamInfo_ = true; + //sub_.reset(); + } + } + void getCapCallback (const sensor_msgs::msg::Image::SharedPtr image_msg) { @@ -220,6 +235,8 @@ class IndividualMarkersNoKinect : public rclcpp::Node cv::Mat ipl_image = cv_ptr_->image; marker_detector.Detect(&ipl_image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true); arPoseMarkers_.markers.clear (); + + RCLCPP_WARN(this->get_logger(),"%d",marker_detector.markers->size()); for (size_t i=0; isize(); i++) { //Get the pose relative to the camera diff --git a/ar_track_alvar/nodes/TrainMarkerBundle.cpp b/ar_track_alvar/nodes/TrainMarkerBundle.cpp index 06be1ed..c2cde86 100644 --- a/ar_track_alvar/nodes/TrainMarkerBundle.cpp +++ b/ar_track_alvar/nodes/TrainMarkerBundle.cpp @@ -77,6 +77,7 @@ class TrainMarkerBundle : public rclcpp::Node ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; visualization_msgs::msg::Marker rvizMarker_; + rclcpp::Subscription::SharedPtr info_sub_; int auto_count; @@ -117,7 +118,7 @@ class TrainMarkerBundle : public rclcpp::Node cam_info_topic = argv[6]; output_frame = argv[7]; marker_detector.SetMarkerSize(marker_size); - cam = new Camera(cam_info_topic); + cam = new Camera(); //Give tf a chance to catch up before the camera callback starts asking for transforms @@ -135,6 +136,21 @@ class TrainMarkerBundle : public rclcpp::Node //Subscribe to camera message RCLCPP_INFO(this->get_logger(),"Subscribing to image topic"); cam_sub_ = image_transport::create_subscription(this, cam_image_topic, [&](auto& msg) { this->getCapCallback(msg); },"raw"); + + RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); + info_sub_ = this->create_subscription(cam_info_topic, 1, std::bind(&TrainMarkerBundle::InfoCallback, this, std::placeholders::_1)); + + } + + void InfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) + { + RCLCPP_INFO(this->get_logger(),"this executed"); + if (!cam->getCamInfo_) + { + cam->SetCameraInfo(cam_info); + cam->getCamInfo_ = true; + //sub_.reset(); + } } void getCapCallback (const sensor_msgs::msg::Image::ConstSharedPtr& image_msg) diff --git a/ar_track_alvar/src/Camera.cpp b/ar_track_alvar/src/Camera.cpp index 3fe016c..723ebf9 100644 --- a/ar_track_alvar/src/Camera.cpp +++ b/ar_track_alvar/src/Camera.cpp @@ -126,7 +126,8 @@ bool ProjPoints::AddPointsUsingMarkers(vector &marker_corners, vect return true; } -Camera::Camera(): Node("camera_node") { 
+Camera::Camera()//:Node("camera_node") +{ calib_K = cvMat(3, 3, CV_64F, calib_K_data); calib_D = cvMat(4, 1, CV_64F, calib_D_data); memset(calib_K_data,0,sizeof(double)*3*3); @@ -140,30 +141,32 @@ Camera::Camera(): Node("camera_node") { calib_y_res = 480; x_res = 640; y_res = 480; + getCamInfo_ = false; } -Camera::Camera(std::string cam_info_topic):Node("camera_node") -{ - calib_K = cvMat(3, 3, CV_64F, calib_K_data); - calib_D = cvMat(4, 1, CV_64F, calib_D_data); - memset(calib_K_data,0,sizeof(double)*3*3); - memset(calib_D_data,0,sizeof(double)*4); - calib_K_data[0][0] = 550; // Just some focal length by default - calib_K_data[1][1] = 550; // Just some focal length by default - calib_K_data[0][2] = 320; - calib_K_data[1][2] = 240; - calib_K_data[2][2] = 1; - calib_x_res = 640; - calib_y_res = 480; - x_res = 640; - y_res = 480; - cameraInfoTopic_ = cam_info_topic; +// Camera::Camera(std::string cam_info_topic)//:Node("camera_node") +// { +// RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); +// calib_K = cvMat(3, 3, CV_64F, calib_K_data); +// calib_D = cvMat(4, 1, CV_64F, calib_D_data); +// memset(calib_K_data,0,sizeof(double)*3*3); +// memset(calib_D_data,0,sizeof(double)*4); +// calib_K_data[0][0] = 550; // Just some focal length by default +// calib_K_data[1][1] = 550; // Just some focal length by default +// calib_K_data[0][2] = 320; +// calib_K_data[1][2] = 240; +// calib_K_data[2][2] = 1; +// calib_x_res = 640; +// calib_y_res = 480; +// x_res = 640; +// y_res = 480; +// cameraInfoTopic_ = cam_info_topic; - RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); - sub_ = this->create_subscription(cameraInfoTopic_, 1,std::bind(&Camera::camInfoCallback, this, _1)); - getCamInfo_ = false; -} +// RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); +// sub_ = this->create_subscription(cameraInfoTopic_, 1,std::bind(&Camera::camInfoCallback, this, _1)); +// getCamInfo_ = false; +// } void Camera::SetSimpleCalib(int _x_res, int _y_res, double f_fac) @@ -290,16 +293,6 @@ void Camera::SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_inf } } -void Camera::camInfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) - { - if (!getCamInfo_) - { - SetCameraInfo(cam_info); - getCamInfo_ = true; - sub_.reset(); - } - } - bool Camera::SetCalib(const char *calibfile, int _x_res, int _y_res, FILE_FORMAT format) { x_res = _x_res; y_res = _y_res; diff --git a/ar_track_alvar/test/marker_arg_config_basic.py b/ar_track_alvar/test/marker_arg_config_basic.py new file mode 100644 index 0000000..5a5ceaa --- /dev/null +++ b/ar_track_alvar/test/marker_arg_config_basic.py @@ -0,0 +1,20 @@ +import os +import sys +import time +import unittest + +from launch import LaunchDescription + +from launch_ros.actions import Node + +import launch_testing + +import pytest + +import rclpy + + +@pytest.mark.rostest +def generate_test_description(): + + diff --git a/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py b/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py index 6bf4ecb..eeac05b 100644 --- a/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py +++ b/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py @@ -41,8 +41,8 @@ def generate_test_description(): Node( package='ar_track_alvar', - executable='individualMarkers', - name='invidual_markers', + executable='individualMarkersNoKinect', + name='individual_markers', remappings=[ ("camera_image", cam_image_topic), ("camera_info",cam_info_topic) @@ -57,7 +57,7 @@ class 
TestArTrack(unittest.TestCase): @classmethod def setUpClass(cls): rclpy.init() - cls.node = rclpy.create_node('test_ar_track_alvar_node') + cls.node = rclpy.create_node('test_ar_track_alvar_markers') @classmethod def tearDownClass(cls): From 6c1d44c4d0236650806d72630e9d253dd07e6477 Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Wed, 28 Jul 2021 16:07:58 -0500 Subject: [PATCH 16/27] ported most of the tests --- ar_track_alvar/CMakeLists.txt | 5 + .../include/ar_track_alvar/Camera.h | 6 +- ar_track_alvar/test/test_ar_legacy.py | 185 ++++++++++++++++++ .../test_ar_track_alvar_individual_markers.py | 6 +- 4 files changed, 196 insertions(+), 6 deletions(-) create mode 100644 ar_track_alvar/test/test_ar_legacy.py diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index ee55a6b..4d8ca54 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -190,6 +190,11 @@ if(BUILD_TESTING) PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}" ) + # Legacy Tests + add_launch_test("test/test_ar_legacy.py" + PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}" + ) + endif() #TODO fix these tests # if(BUILD_TESTING) diff --git a/ar_track_alvar/include/ar_track_alvar/Camera.h b/ar_track_alvar/include/ar_track_alvar/Camera.h index b1f5805..bc9003a 100644 --- a/ar_track_alvar/include/ar_track_alvar/Camera.h +++ b/ar_track_alvar/include/ar_track_alvar/Camera.h @@ -84,7 +84,7 @@ struct ALVAR_EXPORT ProjPoints { /** * \brief Simple \e Camera class for calculating distortions, orientation or projections with pre-calibrated camera */ -class ALVAR_EXPORT Camera: public rclcpp::Node { +class ALVAR_EXPORT Camera { //: public rclcpp::Node { public: @@ -99,8 +99,8 @@ class ALVAR_EXPORT Camera: public rclcpp::Node { protected: std::string cameraInfoTopic_; sensor_msgs::msg::CameraInfo cam_info_; - void camInfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info); - rclcpp::Subscription::SharedPtr sub_; + // void camInfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info); + // rclcpp::Subscription::SharedPtr sub_; private: bool LoadCalibXML(const char *calibfile); diff --git a/ar_track_alvar/test/test_ar_legacy.py b/ar_track_alvar/test/test_ar_legacy.py new file mode 100644 index 0000000..59c5389 --- /dev/null +++ b/ar_track_alvar/test/test_ar_legacy.py @@ -0,0 +1,185 @@ +# import os +# import sys +# import time +# import unittest + +# from ament_index_python.packages import get_package_share_directory +# from launch import LaunchDescription +# from launch_ros.actions import Node +# import launch_testing +# import launch + +# import pytest +# import rclpy +# import numpy +# import math + +# from geometry_msgs.msg import Pose + +from rclpy.duration import Duration +# from rclpy.node import Node + + +# from sensor_msgs.msg import Image +# from ar_track_alvar_msgs.msg import AlvarMarkers + +import os +import sys +import time +import unittest + +from ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch_ros.actions import Node +import launch_testing +import launch + +import pytest +import rclpy + +from stereo_msgs.msg import DisparityImage +from sensor_msgs.msg import Image +from ar_track_alvar_msgs.msg import AlvarMarkers +from tf2_ros import TransformBroadcaster, TransformListener, TransformStamped, Buffer, LookupException, ConnectivityException, ExtrapolationException + +# Test Parameters +bag_name = os.path.join(get_package_share_directory('ar_track_alvar'),'ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag') 
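+# The replay below assumes the `rosbag_v2` storage plugin for `ros2 bag` is
+# installed, since the reference recording is a legacy ROS1 bag shipped in
+# this package's share directory.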
+# numpy provides the assert_approx_equal checks used in TestArTrack below.
+import numpy
+
+cam_image_topic ="camera/image_raw"
+cam_info_topic = "camera/camera_info"
+marker_margin="2"
+marker_resolution="5"
+marker_size="2.3"
+max_new_marker_error="0.08"
+max_frequency="100"
+max_track_error="0.2"
+output_frame="camera"
+
+
+@pytest.mark.rostest
+def generate_test_description():
+
+    return LaunchDescription([
+        # Replay the reference bag through the rosbag_v2 plugin
+        launch.actions.ExecuteProcess(
+            cmd=['ros2', 'bag', 'play', '-s',"rosbag_v2",bag_name],
+            output='screen'
+        ),
+
+        Node(
+            package='ar_track_alvar',
+            executable='individualMarkersNoKinect',
+            name='individual_markers',
+            remappings=[
+                ("camera_image", cam_image_topic),
+                ("camera_info",cam_info_topic)
+            ],
+            arguments=[marker_size, max_new_marker_error, max_track_error, cam_image_topic, cam_info_topic, output_frame],
+            output='screen'
+        ),
+        launch_testing.actions.ReadyToTest(),
+    ])
+
+class TestArTrack(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        rclpy.init()
+        cls.tfBuffer = Buffer()
+        cls.node = rclpy.create_node('test_ar_track_alvar_markers')
+        cls.tflistener = TransformListener(cls.tfBuffer, cls.node)
+
+    @classmethod
+    def tearDownClass(cls):
+        cls.node.destroy_node()
+        rclpy.shutdown()
+
+    def test_markers_received(self):
+        # A subscription to the detector output could also be collected here:
+        # msgs_received = []
+        # self.node.create_subscription(
+        #     AlvarMarkers,
+        #     "ar_pose_marker",
+        #     lambda msg: msgs_received.append(msg),
+        #     1
+        # )
+
+        # Spin while the bag replays so the TF frames published by the
+        # detector accumulate in the buffer.
+        start_time = time.time()
+        while rclpy.ok() and (time.time() - start_time) < 120:
+            rclpy.spin_once(self.node, timeout_sec=0.1)
+
+        now = self.node.get_clock().now()
+
+        # rclpy Durations are built from seconds/nanoseconds; the ROS1-style
+        # dur.sec / dur.nsec assignments would raise AttributeError.
+        dur = Duration(seconds=40)
+
+        # The values in this list are ordered by marker number.
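+        # Each entry below is [[x, y, z], [qx, qy, qz, qw]] for frame
+        # ar_marker_<i>, captured from the reference bag.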
+        tf_expected = [[[0.04464459977845401, 0.05341412598745035, 0.26796900627543024], [0.6726999185589797, 0.7391474391806558, -0.01739267319701629, -0.028868280906256056]],
+        [[-0.04805772245624329, 0.039528315926071665, 0.26775882622136327], [0.48207151562664247, 0.8758763282975102, -0.016363763970395625, -0.013414118615296202]],
+        [[0.007233278235745441, 0.015615692018491452, 0.26619586686955365], [0.08546919545682985, 0.9959809257461487, 0.00424040439, -0.02677659572186436]],
+        [[0.06223014382428272, 0.014613815037010106, 0.26226145707174475], [-0.46400320825216246, 0.8850390875261293, 0.032644264656690035, -0.018471282241381157]]]
+        for i in range(0, len(tf_expected)):
+            target_frame = 'ar_marker_{}'.format(i)
+            try:
+                # In ROS2, lookup_transform returns a TransformStamped; the
+                # ROS1-style (trans, rot) tuple unpacking does not work.
+                tfs = self.tfBuffer.lookup_transform('camera', target_frame, now, dur)
+            except (LookupException, ConnectivityException, ExtrapolationException) as e:
+                self.node.get_logger().error(str(e) + ' target_frame={}'.format(target_frame))
+                continue
+            trans = tfs.transform.translation
+            rot = tfs.transform.rotation
+
+            # Compare each translation element (x, y, z)
+            for v_ret, v_expected in zip([trans.x, trans.y, trans.z], tf_expected[i][0]):
+                # Only the first significant figure is compared, so these are
+                # coarse sanity checks rather than exact matches.
+                numpy.testing.assert_approx_equal(
+                    v_ret, v_expected, significant=1)
+            # Compare each orientation element (x, y, z, w)
+            for v_ret, v_expected in zip([rot.x, rot.y, rot.z, rot.w], tf_expected[i][1]):
+                numpy.testing.assert_approx_equal(
+                    v_ret, v_expected, significant=1)
+
diff --git a/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py b/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py
index eeac05b..d459acf 100644
--- a/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py
+++ b/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py
@@ -18,15 +18,15 @@
 
 # Test Parameters
 bag_name = os.path.join(get_package_share_directory('ar_track_alvar'),'ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag')
-cam_image_topic ="/camera/image_raw"
-cam_info_topic = "/camera/camera_info"
+cam_image_topic ="camera/image_raw"
+cam_info_topic = "camera/camera_info"
 marker_margin="2"
 marker_resolution="5"
 marker_size="2.3"
 max_new_marker_error="0.08"
 max_frequency="100"
 max_track_error="0.2"
-output_frame="/camera"
+output_frame="camera"
 
 @pytest.mark.rostest
 def generate_test_description():

From b3111c5a79dcd0c4e946041a7716c39c5d076d1b Mon Sep 17 00:00:00 2001
From: Patrick Musau
Date: Thu, 29 Jul 2021 13:10:58 -0500
Subject: [PATCH 17/27] connected component changes

---
 .../ar_track_alvar/ConnectedComponents.h      | 153 ++--
 ar_track_alvar/include/ar_track_alvar/Draw.h  | 247 +++--
 .../nodes/IndividualMarkersNoKinect.cpp       |  12 +-
 ar_track_alvar/src/ConnectedComponents.cpp    | 734 +++++++++---------
 ar_track_alvar/src/Draw.cpp                   | 465 +++++------
 ar_track_alvar/src/MarkerDetector.cpp         |   7 +-
 ar_track_alvar/test/test_ar_legacy.py         |  72 +-
 7 files changed, 812 insertions(+), 878 deletions(-)

diff --git a/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h b/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h
index 35d9317..7063eb6 100644
--- a/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h
+++ b/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h
@@ -35,105 +35,104 @@
 #include "Line.h"
 #include "Camera.h"
 
-namespace alvar {
-
+namespace alvar
+{
 /**
  * \brief Connected components labeling methods.
-*/
+ */
 enum ALVAR_EXPORT LabelingMethod
 {
-  CVSEQ
+  CVSEQ
 };
 
 /**
 * \brief Base class for labeling connected components from binary image.
-*/
+ */
 class ALVAR_EXPORT Labeling
 {
+protected:
+  Camera* cam;
+  int thresh_param1, thresh_param2;
 
-protected :
-
-  Camera *cam;
-  int thresh_param1, thresh_param2;
-
-public :
-
-  /**
-   * \brief Pointer to grayscale image that is thresholded for labeling.
-   */
-  cv::Mat *gray;
-  /**
-   * \brief Pointer to binary image that is then labeled.
-   */
-  cv::Mat *bw;
-
-  /**
-   * \brief Vector of 4-length vectors where the corners of detected blobs are stored.
-   */
-  std::vector<std::vector<PointDouble> > blob_corners;
-
-  /**
-   * \brief Two alternatives for thresholding the gray image. ADAPT (adaptive threshold) is only supported currently.
-   */
-  enum ThresholdMethod
-  {
-    THRESH,
-    ADAPT
-  };
-
-  /** Constructor */
-  Labeling();
-
-  /** Destructor*/
-  virtual ~Labeling();
-
-  /**
-   * \brief Sets the camera object that is used to correct lens distortions.
-   */
-  void SetCamera(Camera* camera) {cam = camera;}
-
-  /**
-   * \brief Labels image and filters blobs to obtain square-shaped objects from the scene.
-   */
-  virtual void LabelSquares(cv::Mat* image, bool visualize=false) = 0;
-
-  bool CheckBorder(CvSeq* contour, int width, int height);
-
-  void SetThreshParams(int param1, int param2)
-  {
-    thresh_param1 = param1;
-    thresh_param2 = param2;
-  }
+public:
+  /**
+   * \brief Grayscale image that is thresholded for labeling.
+   */
+  cv::Mat gray;
+  /**
+   * \brief Binary image that is then labeled.
+   */
+  cv::Mat bw;
+
+  /**
+   * \brief Vector of 4-length vectors where the corners of detected blobs are
+   * stored.
+   */
+  std::vector<std::vector<PointDouble>> blob_corners;
+
+  /**
+   * \brief Two alternatives for thresholding the gray image. ADAPT (adaptive
+   * threshold) is only supported currently.
+   */
+  enum ThresholdMethod
+  {
+    THRESH,
+    ADAPT
+  };
+
+  /** Constructor */
+  Labeling();
+
+  /** Destructor*/
+  virtual ~Labeling();
+
+  /**
+   * \brief Sets the camera object that is used to correct lens distortions.
+   */
+  void SetCamera(Camera* camera)
+  {
+    cam = camera;
+  }
+
+  /**
+   * \brief Labels image and filters blobs to obtain square-shaped objects from
+   * the scene.
+   */
+  virtual void LabelSquares(cv::Mat& image, bool visualize = false) = 0;
+
+  bool CheckBorder(const std::vector<cv::Point>& contour, int width,
+                   int height);
+
+  void SetThreshParams(int param1, int param2)
+  {
+    thresh_param1 = param1;
+    thresh_param2 = param2;
+  }
 };
 
 /**
 * \brief Labeling class that uses OpenCV routines to find connected components.
-*/ + */ class ALVAR_EXPORT LabelingCvSeq : public Labeling { - -protected : - - int _n_blobs; - int _min_edge; - int _min_area; - bool detect_pose_grayscale; - - CvMemStorage* storage; +protected: + int _n_blobs; + int _min_edge; + int _min_area; + bool detect_pose_grayscale; public: + LabelingCvSeq(); + ~LabelingCvSeq(); - LabelingCvSeq(); - ~LabelingCvSeq(); - - void SetOptions(bool _detect_pose_grayscale=false); + void SetOptions(bool _detect_pose_grayscale = false); - void LabelSquares(cv::Mat* image, bool visualize=false); + void LabelSquares(cv::Mat& image, bool visualize = false); - // TODO: Releases memory inside, cannot return CvSeq* - std::vector LabelImage(cv::Mat* image, int min_size, bool approx=false); + std::vector> LabelImage(cv::Mat& image, int min_size, + bool approx = false); }; -} // namespace alvar +} // namespace alvar -#endif +#endif \ No newline at end of file diff --git a/ar_track_alvar/include/ar_track_alvar/Draw.h b/ar_track_alvar/include/ar_track_alvar/Draw.h index f7f66f5..1836915 100644 --- a/ar_track_alvar/include/ar_track_alvar/Draw.h +++ b/ar_track_alvar/include/ar_track_alvar/Draw.h @@ -36,140 +36,165 @@ #include "Camera.h" #include "Line.h" #include +#include -namespace alvar { - +namespace alvar +{ /** \brief Draws the bounding box of a connected component (Blob). * \param image Pointer to the destination image. * \param points Vector of points that determine the bounding box. - * \param color Use CV_RGB(red,green,blue) to determine the color of the bounding box. - * \param label A label to show in the center of the bounding box. + * \param color Use CV_RGB(red,green,blue) to determine the color of the + * bounding box. \param label A label to show in the center of the bounding box. */ -template -void inline DrawBB(cv::Mat *image, const std::vector& points, CvScalar color, std::string label="") +template +void inline DrawBB(cv::Mat& image, const std::vector& points, + const cv::Scalar& color, const std::string& label = "") { - if (points.size() < 2) { - return; - } - PointType minimum = PointType(image->rows, image->cols); - PointType maximum = PointType(0, 0); - for (int i = 0; i < (int)points.size(); ++i) { - PointType current = points.at(i); - if (current.x < minimum.x) minimum.x = current.x; - if (current.x > maximum.x) maximum.x = current.x; - if (current.y < minimum.y) minimum.y = current.y; - if (current.y > maximum.y) maximum.y = current.y; - } - cv::line(*image, cv::Point((int)minimum.x,(int)minimum.y), cv::Point((int)maximum.x,(int)minimum.y), color); - cv::line(*image, cv::Point((int)maximum.x,(int)minimum.y), cv::Point((int)maximum.x,(int)maximum.y), color); - cv::line(*image, cv::Point((int)maximum.x,(int)maximum.y), cv::Point((int)minimum.x,(int)maximum.y), color); - cv::line(*image, cv::Point((int)minimum.x,(int)maximum.y), cv::Point((int)minimum.x,(int)minimum.y), color); - // if (!label.empty()) { - // CvFont font; - //cvInitFont(&font, 0, 0.5, 0.5, 0); - cv::putText(*image, label.c_str(), cv::Point((int)minimum.x+1,(int)minimum.y+2), cv::FONT_HERSHEY_DUPLEX,1,cv::Scalar(0,255,0),2,false); - } + if (points.size() < 2) + { + return; + } + PointType minimum = PointType(image.cols, image.rows); + PointType maximum = PointType(0, 0); + for (int i = 0; i < (int)points.size(); ++i) + { + PointType current = points.at(i); + if (current.x < minimum.x) + minimum.x = current.x; + if (current.x > maximum.x) + maximum.x = current.x; + if (current.y < minimum.y) + minimum.y = current.y; + if (current.y > maximum.y) + maximum.y = current.y; + } + 
cv::line(image, cv::Point((int)minimum.x, (int)minimum.y), + cv::Point((int)maximum.x, (int)minimum.y), color); + cv::line(image, cv::Point((int)maximum.x, (int)minimum.y), + cv::Point((int)maximum.x, (int)maximum.y), color); + cv::line(image, cv::Point((int)maximum.x, (int)maximum.y), + cv::Point((int)minimum.x, (int)maximum.y), color); + cv::line(image, cv::Point((int)minimum.x, (int)maximum.y), + cv::Point((int)minimum.x, (int)minimum.y), color); + if (!label.empty()) + { + cv::putText(image, label, cv::Point((int)minimum.x + 1, (int)minimum.y + 2), + 0, 0.5, CV_RGB(255, 255, 0)); + } } -/** \brief Draws a set of points. - * \param image Pointer to the destination image. - * \param points Array of CvPoints to be visualzed. - * \param color Use CV_RGB(red,green,blue) to determine the color. -*/ -void ALVAR_EXPORT DrawPoints(cv::Mat *image, const std::vector& points, CvScalar color); - /** \brief Draws lines between consecutive points stored in vector (polyline). - * \param image Pointer to the destination image. - * \param points Vector of points that determine the polyline. - * \param color Use CV_RGB(red,green,blue) to determine the color. - * \param loop If true, the polyline is closed. -*/ -template -void inline DrawLines(cv::Mat *image, const std::vector& points, CvScalar color, bool loop=true) + * \param image Pointer to the destination image. + * \param points Vector of points that determine the polyline. + * \param color Use CV_RGB(red,green,blue) to determine the color. + * \param loop If true, the polyline is closed. + */ +template +void inline DrawLines(cv::Mat& image, const std::vector& points, + const cv::Scalar& color, bool loop = true) { - for(unsigned i = 1; i < points.size(); ++i) - cv::line(*image, cv::Point((int)points[i-1].x,(int)points[i-1].y), cv::Point((int)points[i].x,(int)points[i].y), color); - if (loop) { - cv::line(*image, cv::Point((int)points[points.size()-1].x,(int)points[points.size()-1].y), cv::Point((int)points[0].x,(int)points[0].y), color); - } + for (unsigned i = 1; i < points.size(); ++i) + cv::line(image, cv::Point((int)points[i - 1].x, (int)points[i - 1].y), + cv::Point((int)points[i].x, (int)points[i].y), color); + if (loop) + { + cv::line(image, + cv::Point((int)points[points.size() - 1].x, + (int)points[points.size() - 1].y), + cv::Point((int)points[0].x, (int)points[0].y), color); + } } /** \brief Draws a line. - * \param image Pointer to the destination image. - * \param line Line struct to be drawn. - * \param color Use CV_RGB(red,green,blue) to determine the color. -*/ -void ALVAR_EXPORT DrawLine(cv::Mat* image, const alvar::Line line, cv::Scalar color = cv::Scalar(0,255,0)); + * \param image Pointer to the destination image. + * \param line Line struct to be drawn. + * \param color Use CV_RGB(red,green,blue) to determine the color. + */ +void ALVAR_EXPORT DrawLine(cv::Mat& image, const Line line, + const cv::Scalar& color = CV_RGB(0, 255, 0)); /** \brief Draws points of the contour that is obtained by \e Labeling class. - * \param image Pointer to the destination image. - * \param contour Controur sequence. - * \param color Use CV_RGB(red,green,blue) to determine the color. -*/ -void ALVAR_EXPORT DrawPoints(cv::Mat* image, const CvSeq* contour, cv::Scalar color = cv::Scalar(255,0,0)); - - -/** \brief Draws circles to the contour points that are obtained by \e Labeling class. - * \param image Pointer to the destination image. - * \param contour Controur sequence. - * \param radius Circle radius in pixels. 
- * \param color Use CV_RGB(red,green,blue) to determine the color. -*/ -void ALVAR_EXPORT DrawCircles(cv::Mat* image, const CvSeq* contour, int radius, cv::Scalar color = cv::Scalar(255,0,0)); + * \param image Pointer to the destination image. + * \param contour Controur sequence. + * \param color Use CV_RGB(red,green,blue) to determine the color. + */ +void ALVAR_EXPORT DrawPoints(cv::Mat& image, + const std::vector& contour, + const cv::Scalar& color = CV_RGB(255, 0, 0)); + +/** \brief Draws circles to the contour points that are obtained by \e Labeling + * class. \param image Pointer to the destination image. \param contour + * Controur sequence. \param radius Circle radius in pixels. \param color Use + * CV_RGB(red,green,blue) to determine the color. + */ +void ALVAR_EXPORT DrawCircles(cv::Mat& image, + const std::vector& contour, int radius, + const cv::Scalar& color = CV_RGB(255, 0, 0)); /** \brief Draws lines between consecutive contour points. - * \param image Pointer to the destination image. - * \param contour Controur sequence. - * \param color Use CV_RGB(red,green,blue) to determine the color. -*/ -void ALVAR_EXPORT DrawLines(cv::Mat* image, const CvSeq* contour, cv::Scalar color = cv::Scalar(255,0,0)); + * \param image Pointer to the destination image. + * \param contour Controur sequence. + * \param color Use CV_RGB(red,green,blue) to determine the color. + */ +void ALVAR_EXPORT DrawLines(cv::Mat& image, + const std::vector& contour, + const cv::Scalar& color = CV_RGB(255, 0, 0)); /** \brief Draws circles to the array of points. - * \param image Pointer to the destination image. - * \param points Vector of points to be visualized. - * \param color Use CV_RGB(red,green,blue) to determine the color. - * \param radius Circle radius in pixels. -*/ -template -void inline DrawPoints(cv::Mat *image, const std::vector& points, cv::Scalar color, int radius=1) + * \param image Pointer to the destination image. + * \param points Vector of points to be visualized. + * \param color Use CV_RGB(red,green,blue) to determine the color. + * \param radius Circle radius in pixels. + */ +template +void inline DrawPoints(cv::Mat& image, const std::vector& points, + const cv::Scalar& color, int radius = 1) { - for(unsigned i = 0; i < points.size(); ++i) - cv::circle(*image, cv::Point((int)points[i].x,(int)points[i].y), radius, color); + for (unsigned i = 0; i < points.size(); ++i) + cv::circle(image, cv::Point((int)points[i].x, (int)points[i].y), radius, + color); } /** \brief Draws OpenCV ellipse. - * \param image Pointer to the destination image. - * \param ellipse Ellipse struct in OpenCV format. - * \param color Use CV_RGB(red,green,blue) to determine the color. - * \param fill If false, only the outline is drawn. - * \param par The ellipse width and height are grown by \e par pixels. -*/ -void ALVAR_EXPORT DrawCVEllipse(cv::Mat* image, CvBox2D& ellipse, cv::Scalar color, bool fill=false, double par=0); - -/** \brief This function is used to construct a texture image which is needed to hide a marker from the original video frame. See \e SampleMarkerHide.cpp for example implementation. - * \param image Pointer to the original video frame from where the hiding texture is calculated. - * \param hide_texture Pointer to the destination image where the resulting texture is stored. - * \param cam Camera object that is used for marker tracking. - * \param gl_modelview Current model view matrix. - * \param topleft Top left limit of the texture area in marker coordinate frame. 
- * \param botright Bottom right limit of the texture area in marker coordinate frame. + * \param image Pointer to the destination image. + * \param ellipse Ellipse struct in OpenCV format. + * \param color Use CV_RGB(red,green,blue) to determine the color. + * \param fill If false, only the outline is drawn. + * \param par The ellipse width and height are grown by \e par pixels. */ -void ALVAR_EXPORT BuildHideTexture(cv::Mat *image, cv::Mat *hide_texture, - alvar::Camera *cam, double gl_modelview[16], - alvar::PointDouble topleft, alvar::PointDouble botright); - -/** \brief Draws the texture generated by \e BuildHideTexture to given video frame. For better performance, use OpenGL instead. See \e SampleMarkerHide.cpp for example implementation. - * \param image Pointer to the destination image where the texture is drawn. - * \param texure Pointer to the texture image genereated by \e BuildHideTexture. - * \param cam Camera object that is used for marker tracking. - * \param gl_modelview Current model view matrix. - * \param topleft Top left limit of the texture area in marker coordinate frame. - * \param botright Bottom right limit of the texture area in marker coordinate frame. +void ALVAR_EXPORT DrawCVEllipse(cv::Mat& image, const cv::RotatedRect& ellipse, + const cv::Scalar& color, bool fill = false, + double par = 0); + +/** \brief This function is used to construct a texture image which is needed to + * hide a marker from the original video frame. See \e SampleMarkerHide.cpp for + * example implementation. \param image Pointer to the original video frame + * from where the hiding texture is calculated. \param hide_texture Pointer to + * the destination image where the resulting texture is stored. \param cam + * Camera object that is used for marker tracking. \param gl_modelview Current + * model view matrix. \param topleft Top left limit of the texture area in + * marker coordinate frame. \param botright Bottom right limit of the texture + * area in marker coordinate frame. */ -void ALVAR_EXPORT DrawTexture(cv::Mat *image, cv::Mat *texture, - alvar::Camera *cam, double gl_modelview[16], - alvar::PointDouble topleft, alvar::PointDouble botright); - -//}// namespace alvar +void ALVAR_EXPORT BuildHideTexture(cv::Mat& image, cv::Mat& hide_texture, + Camera* cam, double gl_modelview[16], + PointDouble topleft, PointDouble botright); + +/** \brief Draws the texture generated by \e BuildHideTexture to given video + * frame. For better performance, use OpenGL instead. See \e + * SampleMarkerHide.cpp for example implementation. \param image Pointer to + * the destination image where the texture is drawn. \param texure Pointer to + * the texture image genereated by \e BuildHideTexture. \param cam Camera + * object that is used for marker tracking. \param gl_modelview Current model + * view matrix. \param topleft Top left limit of the texture area in marker + * coordinate frame. + * \param botright Bottom right limit of the texture area in marker + * coordinate frame. 
+ */ +void ALVAR_EXPORT DrawTexture(cv::Mat& image, cv::Mat& texture, Camera* cam, + double gl_modelview[16], PointDouble topleft, + PointDouble botright); + +} // namespace alvar -#endif +#endif \ No newline at end of file diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index 0c13491..3fbdc30 100644 --- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -196,7 +196,6 @@ class IndividualMarkersNoKinect : public rclcpp::Node void InfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) { - RCLCPP_INFO(this->get_logger(),"this executed"); if (!cam->getCamInfo_) { cam->SetCameraInfo(cam_info); @@ -215,28 +214,27 @@ class IndividualMarkersNoKinect : public rclcpp::Node geometry_msgs::msg::TransformStamped CamToOutput; try { - tf2_.canTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); - CamToOutput = tf2_.lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); + tf2_.canTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp); + CamToOutput = tf2_.lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp); } catch (tf2::TransformException ex){ RCLCPP_ERROR(this->get_logger(), "%s",ex.what()); } - //Convert the image cv_ptr_ = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); //Get the estimated pose of the main markers by using all the markers in each bundle - // GetMultiMarkersPoses expects an IplImage*, but as of ros groovy, cv_bridge gives // us a cv::Mat. I'm too lazy to change to cv::Mat throughout right now, so I // do this conversion here -jbinney cv::Mat ipl_image = cv_ptr_->image; - marker_detector.Detect(&ipl_image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true); + marker_detector.Detect(&ipl_image, cam, true, true, max_new_marker_error, max_track_error, CVSEQ, true); arPoseMarkers_.markers.clear (); - RCLCPP_WARN(this->get_logger(),"%d",marker_detector.markers->size()); + RCLCPP_WARN(this->get_logger(),"Markers detected %d",marker_detector.markers->size()); + for (size_t i=0; isize(); i++) { //Get the pose relative to the camera diff --git a/ar_track_alvar/src/ConnectedComponents.cpp b/ar_track_alvar/src/ConnectedComponents.cpp index f0004ad..70daeab 100644 --- a/ar_track_alvar/src/ConnectedComponents.cpp +++ b/ar_track_alvar/src/ConnectedComponents.cpp @@ -27,354 +27,291 @@ using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; Labeling::Labeling() { - gray = 0; - bw = 0; - cam = 0; - thresh_param1 = 31; - thresh_param2 = 5; + cam = 0; + thresh_param1 = 31; + thresh_param2 = 5; } Labeling::~Labeling() { } -bool Labeling::CheckBorder(CvSeq* contour, int width, int height) +bool Labeling::CheckBorder(const std::vector& contour, int width, + int height) { - bool ret = true; - for(int i = 0; i < contour->total; ++i) - { - CvPoint* pt = (CvPoint*)cv::getSeqElem(contour, i); - if((pt->x <= 1) || (pt->x >= width-2) || (pt->y <= 1) || (pt->y >= height-2)) ret = false; - } - return ret; + bool ret = true; + for (const auto& pt : contour) + { + if ((pt.x <= 1) || (pt.x >= width - 2) || (pt.y <= 1) || + (pt.y >= height - 2)) + ret = false; + } + return ret; } LabelingCvSeq::LabelingCvSeq() : _n_blobs(0), _min_edge(20), _min_area(25) { - SetOptions(); - storage = cvCreateMemStorage(0); + SetOptions(); } LabelingCvSeq::~LabelingCvSeq() { - 
if(storage)
-    cvReleaseMemStorage(&storage);
 }
 
-void LabelingCvSeq::SetOptions(bool _detect_pose_grayscale) {
-    detect_pose_grayscale = _detect_pose_grayscale;
+void LabelingCvSeq::SetOptions(bool _detect_pose_grayscale)
+{
+  detect_pose_grayscale = _detect_pose_grayscale;
 }
 
-void LabelingCvSeq::LabelSquares(cv::Mat* image, bool visualize)
+void LabelingCvSeq::LabelSquares(cv::Mat& image, bool visualize)
 {
-
-    if (gray && ((gray->rows != image->rows) || (gray->cols != image->cols))) {
-        // cvReleaseImage(&gray);
-        gray=NULL;
-        // if (bw) cvReleaseImage(&bw);
-        bw=NULL;
-    }
-    if (gray == NULL) {
-
-        cv::Mat temp = cv::Mat(cv::Size(image->rows, image->cols),CV_8UC1);
-
-        gray = &temp;
-
-        cv::Mat temp2 = cv::Mat(cv::Size(image->rows, image->cols),CV_8UC1);
-        //gray->origin = image->origin;
-        bw = &temp2;
-        //bw->origin = image->origin;
+  if (!gray.empty() && ((gray.cols != image.cols) || (gray.rows != image.rows)))
+  {
+    gray.release();
+    bw.release();
+  }
+  if (gray.empty())
+  {
+    gray = cv::Mat(image.rows, image.cols, CV_8UC1);
+    bw = cv::Mat(image.rows, image.cols, CV_8UC1);
+  }
+
+  // Convert grayscale and threshold
+  if (image.channels() == 4)
+    cv::cvtColor(image, gray, cv::COLOR_BGRA2GRAY);
+  else if (image.channels() == 3)
+    cv::cvtColor(image, gray, cv::COLOR_BGR2GRAY);
+  else if (image.channels() == 1)
+    image.copyTo(gray);
+  else
+  {
+    cerr << "Unsupported image format" << endl;
+  }
+
+  cv::adaptiveThreshold(gray, bw, 255, cv::ADAPTIVE_THRESH_MEAN_C,
+                        cv::THRESH_BINARY_INV, thresh_param1, thresh_param2);
+
+  std::vector<std::vector<cv::Point>> contours;
+  std::vector<std::vector<cv::Point>> squares;
+  std::vector<std::vector<cv::Point>> square_contours;
+
+  cv::findContours(bw, contours, cv::RETR_LIST, cv::CHAIN_APPROX_NONE,
+                   cv::Point(0, 0));
+
+  for (const auto& contour : contours)
+  {
+    if (contour.size() < _min_edge)
+    {
+      continue;
+    }
+
+    std::vector<cv::Point> result;
+    cv::approxPolyDP(contour, result, cv::arcLength(contour, false) * 0.035,
+                     true);  // TODO: Parameters?
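+    // The epsilon of 0.035 * arcLength(...) keeps the factor used by the old
+    // CV_POLY_APPROX_DP call; a smaller epsilon keeps more vertices, a larger
+    // one merges short edges.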
+    if (result.size() == 4 && CheckBorder(result, image.cols, image.rows) &&
+        cv::contourArea(result) > _min_area &&  // TODO check limits
+        cv::isContourConvex(result))  // ttehop: Changed to 'contours' instead
+                                      // of 'result'
+    {
+      squares.push_back(result);
+      square_contours.push_back(contour);
+    }
+  }
+
+  _n_blobs = squares.size();
+  blob_corners.resize(_n_blobs);
+
+  // For every detected 4-corner blob
+  for (int i = 0; i < _n_blobs; ++i)
+  {
+    vector<Line> fitted_lines(4);
+    blob_corners[i].resize(4);
+    const auto& square = squares.at(i);
+    const auto& square_contour = square_contours.at(i);
+
+    for (int j = 0; j < 4; ++j)
+    {
+      const auto& pt0 = square.at(j);
+      const auto& pt1 = square.at((j + 1) % 4);
+      int k0 = -1, k1 = -1;
+      for (int k = 0; k < square_contour.size(); k++)
+      {
+        const auto& pt2 = square_contour.at(k);
+        if ((pt0.x == pt2.x) && (pt0.y == pt2.y))
+          k0 = k;
+        if ((pt1.x == pt2.x) && (pt1.y == pt2.y))
+          k1 = k;
+      }
+      int len;
+      if (k1 >= k0)
+        len = k1 - k0 - 1;  // neither k0 nor k1 are included
+      else
+        len = square_contour.size() - k0 + k1 - 1;
+      if (len == 0)
+        len = 1;
+
+      cv::Mat line_data = cv::Mat(1, len, CV_32FC2);
+      for (int l = 0; l < len; l++)
+      {
+        int ll = (k0 + l + 1) % square_contour.size();
+        const auto& p = square_contour.at(ll);
+        cv::Point2f pp;
+        pp.x = float(p.x);
+        pp.y = float(p.y);
+
+        // Undistort
+        if (cam)
+          cam->Undistort(pp);
+
+        line_data.at<cv::Point2f>(0, l) = pp;
+      }
+
+      // Fit edge and put to vector of edges
+      cv::Vec4f params;
+
+      // TODO: The detect_pose_grayscale is still under work...
+      /*
+      if (detect_pose_grayscale &&
+          (pt0->x > 3) && (pt0->y > 3) &&
+          (pt0->x < (gray->width-4)) &&
+          (pt0->y < (gray->height-4)))
+      {
+        // ttehop: Grayscale experiment
+        FitLineGray(line_data, params, gray);
+      }
+      */
+      cv::fitLine(line_data, params, cv::DIST_L2, 0, 0.01, 0.01);
+
+      // cvFitLine(line_data, CV_DIST_L2, 0, 0.01, 0.01, params);
+      ////cvFitLine(line_data, CV_DIST_HUBER, 0, 0.01, 0.01, params);
+      Line line = Line(params);
+      if (visualize)
+        DrawLine(image, line);
+      fitted_lines[j] = line;
+
+      line_data.release();
+    }
+
+    // Calculated four intersection points
+    for (size_t j = 0; j < 4; ++j)
+    {
+      PointDouble intc =
+          Intersection(fitted_lines[j], fitted_lines[(j + 1) % 4]);
+
+      // TODO: Instead, test OpenCV find corner in sub-pix...
+      // CvPoint2D32f pt = cvPoint2D32f(intc.x, intc.y);
+      // cvFindCornerSubPix(gray, &pt,
+      //                    1, cvSize(3,3), cvSize(-1,-1),
+      //                    cvTermCriteria(
+      //                    CV_TERMCRIT_ITER+CV_TERMCRIT_EPS,10,1e-4));
+
+      // TODO: Now there is a weird systematic 0.5 pixel error that is fixed
+      // here...
+      // intc.x += 0.5;
+      // intc.y += 0.5;
+
+      if (cam)
+        cam->Distort(intc);
+
+      // TODO: Should we make this always counter-clockwise or clockwise?
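+      // Corner order currently follows the contour direction; the block
+      // commented out below preserved the old IplImage 'origin' handling and
+      // is kept for reference only.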
+ /* + if (image->origin && j == 1) blob_corners[i][3] = intc; + else if (image->origin && j == 3) blob_corners[i][1] = intc; + else blob_corners[i][j] = intc; + */ + blob_corners[i][j] = intc; } - cv::RNG rng(12345); - for( size_t i = 0; i< contours.size(); i++ ) + if (visualize) { - cv::Scalar color = cv::Scalar( rng.uniform(0, 256), rng.uniform(0,256), rng.uniform(0,256) ); - cv::drawContours(*image, contours_poly, (int)i, color ); - cv::rectangle(*image, boundRect[i].tl(), boundRect[i].br(), color, 2 ); - } - - - - - // while(contours) - // { - // if(contours->total < _min_edge) - // { - // contours = contours->h_next; - // continue; - // } - - - // approxPolyDP( contours[i], contours_poly[i], 3, true ); - // CvSeq* result = cv::approxPolyDP(cv::cvarrToMat(contours), sizeof(CvContour), storage, - // CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.035, 0 ); // TODO: Parameters? - - // if( result->total == 4 && CheckBorder(result, image->rows, image->cols) && - // fabs(cvContourArea(result,CV_WHOLE_SEQ)) > _min_area && // TODO check limits - // cvCheckContourConvexity(result) ) // ttehop: Changed to 'contours' instead of 'result' - // { - // cvSeqPush(squares, result); - // cvSeqPush(square_contours, contours); - // } - // contours = contours->h_next; - // } - - // _n_blobs = squares->total; - // blob_corners.resize(_n_blobs); - - // // For every detected 4-corner blob - // for(int i = 0; i < _n_blobs; ++i) - // { - // vector fitted_lines(4); - // blob_corners[i].resize(4); - // CvSeq* sq = (CvSeq*)cv::getSeqElem(squares, i); - // CvSeq* square_contour = (CvSeq*)cv::getSeqElem(square_contours, i); - - // for(int j = 0; j < 4; ++j) - // { - // CvPoint* pt0 = (CvPoint*)cv::getSeqElem(sq, j); - // CvPoint* pt1 = (CvPoint*)cv::getSeqElem(sq, (j+1)%4); - // int k0=-1, k1=-1; - // for (int k = 0; ktotal; k++) { - // CvPoint* pt2 = (CvPoint*)cv::getSeqElem(square_contour, k); - // if ((pt0->x == pt2->x) && (pt0->y == pt2->y)) k0=k; - // if ((pt1->x == pt2->x) && (pt1->y == pt2->y)) k1=k; - // } - // int len; - // if (k1 >= k0) len = k1-k0-1; // neither k0 nor k1 are included - // else len = square_contour->total-k0+k1-1; - // if (len == 0) len = 1; - - // CvMat* line_data = cvCreateMat(1, len, CV_32FC2); - // for (int l=0; ltotal; - // CvPoint* p = (CvPoint*)cv::getSeqElem(square_contour, ll); - // CvPoint2D32f pp; - // pp.x = float(p->x); - // pp.y = float(p->y); - - // // Undistort - // if(cam) - // cam->Undistort(pp); - - // CV_MAT_ELEM(*line_data, CvPoint2D32f, 0, l) = pp; - // } - - // // Fit edge and put to vector of edges - // cv::Vec4f params = {0}; - - // // TODO: The detect_pose_grayscale is still under work... - // /* - // if (detect_pose_grayscale && - // (pt0->x > 3) && (pt0->y > 3) && - // (pt0->x < (gray->width-4)) && - // (pt0->y < (gray->height-4))) - // { - // // ttehop: Grayscale experiment - // FitLineGray(line_data, params, gray); - // } - // */ - // cvFitLine(line_data, CV_DIST_L2, 0, 0.01, 0.01, params); - - // //cvFitLine(line_data, CV_DIST_L2, 0, 0.01, 0.01, params); - // ////cvFitLine(line_data, CV_DIST_HUBER, 0, 0.01, 0.01, params); - // Line line = Line(params); - // if(visualize) DrawLine(image, line); - // fitted_lines[j] = line; - - // cvReleaseMat(&line_data); - // } - - // // Calculated four intersection points - // for(size_t j = 0; j < 4; ++j) - // { - // PointDouble intc = Intersection(fitted_lines[j],fitted_lines[(j+1)%4]); - - // // TODO: Instead, test OpenCV find corner in sub-pix... 
- // //CvPoint2D32f pt = cvPoint2D32f(intc.x, intc.y); - // //cvFindCornerSubPix(gray, &pt, - // // 1, cvSize(3,3), cvSize(-1,-1), - // // cvTermCriteria( - // // CV_TERMCRIT_ITER+CV_TERMCRIT_EPS,10,1e-4)); - - // // TODO: Now there is a wierd systematic 0.5 pixel error that is fixed here... - // //intc.x += 0.5; - // //intc.y += 0.5; - - // if(cam) cam->Distort(intc); - - // // TODO: Should we make this always counter-clockwise or clockwise? - // /* - // if (image->origin && j == 1) blob_corners[i][3] = intc; - // else if (image->origin && j == 3) blob_corners[i][1] = intc; - // else blob_corners[i][j] = intc; - // */ - // blob_corners[i][j] = intc; - // } - // if (visualize) { - // for(size_t j = 0; j < 4; ++j) { - // PointDouble &intc = blob_corners[i][j]; - // if (j == 0) cv::circle(*image, cvPoint(int(intc.x), int(intc.y)), 5, cv::Scalar(255, 255, 255)); - // if (j == 1) cv::circle(*image, cvPoint(int(intc.x), int(intc.y)), 5, cv::Scalar(255, 0, 0)); - // if (j == 2) cv::circle(*image, cvPoint(int(intc.x), int(intc.y)), 5, cv::Scalar(0, 255, 0)); - // if (j == 3) cv::circle(*image, cvPoint(int(intc.x), int(intc.y)), 5, cv::Scalar(0, 0, 255)); - // } - // } - //} - - // cvClearMemStorage(storage); + for (size_t j = 0; j < 4; ++j) + { + PointDouble& intc = blob_corners[i][j]; + if (j == 0) + cv::circle(image, cv::Point(int(intc.x), int(intc.y)), 5, + CV_RGB(255, 255, 255)); + if (j == 1) + cv::circle(image, cv::Point(int(intc.x), int(intc.y)), 5, + CV_RGB(255, 0, 0)); + if (j == 2) + cv::circle(image, cv::Point(int(intc.x), int(intc.y)), 5, + CV_RGB(0, 255, 0)); + if (j == 3) + cv::circle(image, cv::Point(int(intc.x), int(intc.y)), 5, + CV_RGB(0, 0, 255)); + } + } + } } -vector LabelingCvSeq::LabelImage(cv::Mat* image, int min_size, bool approx) +std::vector> LabelingCvSeq::LabelImage(cv::Mat& image, + int min_size, + bool approx) { - // assert(image->origin == 0); // Currently only top-left origin supported - if (gray && ((gray->rows != image->rows) || (gray->cols != image->cols))) { - // cvReleaseImage(&gray); - gray=NULL; - // if (bw) cvReleaseImage(&bw); - bw=NULL; - } - if (gray == NULL) { - - cv::Mat temp = cv::Mat(cv::Size(image->rows, image->cols),CV_8UC1); - - gray = &temp; - - cv::Mat temp2 = cv::Mat(cv::Size(image->rows, image->cols),CV_8UC1); - //gray->origin = image->origin; - bw = &temp2; - } - - // Convert grayscale and threshold - if(image->channels() == 4) - { - cv::cvtColor(*image, *gray, cv::COLOR_RGB2GRAY); - } - else if(image->channels() == 3) - { - cv::cvtColor(*image, *gray, cv::COLOR_RGB2GRAY); - } - else if(image->channels() == 1) - { - // cv::Copy(image, gray); - cv::Mat tmp = image->clone(); - gray = &tmp; - } - else - { - cerr<<"Unsupported image format"< > contours; - //vector hierarchy; - cv::findContours(*bw, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE ); - vector > contours_poly( contours.size() ); - vector boundRect(contours.size()); - - for( size_t i = 0; i < contours.size(); i++ ) + if (!gray.empty() && ((gray.cols != image.cols) || (gray.rows != image.rows))) + { + gray.release(); + bw.release(); + } + if (gray.empty()) + { + gray = cv::Mat(image.rows, image.cols, CV_8UC1); + bw = cv::Mat(image.rows, image.cols, CV_8UC1); + } + + // Convert grayscale and threshold + if (image.channels() == 4) + cv::cvtColor(image, gray, cv::COLOR_RGBA2GRAY); + else if (image.channels() == 3) + cv::cvtColor(image, gray, cv::COLOR_RGB2GRAY); + else if (image.channels() == 1) + image.copyTo(gray); + else + { + cerr << "Unsupported image format" << endl; + } + + 
cv::adaptiveThreshold(gray, bw, 255, cv::ADAPTIVE_THRESH_MEAN_C, + cv::THRESH_BINARY_INV, thresh_param1, thresh_param2); + + std::vector> contours; + std::vector> squares; + + cv::findContours(bw, contours, cv::RETR_LIST, cv::CHAIN_APPROX_NONE, + cv::Point(0, 0)); + + for (const auto& contour : contours) + { + if (approx) { - cv::approxPolyDP( contours[i], contours_poly[i], 3, true ); - boundRect[i] = cv::boundingRect( contours_poly[i]); - // minEnclosingCircle( contours_poly[i], centers[i], radius[i] ); + std::vector result; + cv::approxPolyDP(contour, result, cv::arcLength(contour, false) * 0.02, + false); // TODO: Parameters? + if (cv::isContourConvex(result)) + { + squares.push_back(result); + } } + else + squares.push_back(contour); + } - // CvSeq* contours; - // CvSeq* edges = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); - // CvSeq* squares = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvSeq), storage); - - // cv::findContours(bw, storage, &contours, sizeof(CvContour), - // CV_RETR_LIST, CV_CHAIN_APPROX_NONE, cvPoint(0,0)); - // //cvFindContours(bw, storage, &contours, sizeof(CvContour), - // // CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0)); - - - // while(contours) - // { - // if(contours->total < min_size) - // { - // contours = contours->h_next; - // continue; - // } - - // if(approx) - // { - // CvSeq* result = cv::approxPolyDP(contours, sizeof(CvContour), storage, - // CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.02, 0 ); // TODO: Parameters? - - // if(cvCheckContourConvexity(result)) - // { - // cvSeqPush(squares, result); - // } - // } - // else - // cvSeqPush(squares, contours); - - // contours = contours->h_next; - // } - - // cvClearMemStorage(storage); - - return boundRect; + return squares; } -inline int round(double x) { - return (x)>=0?(int)((x)+0.5):(int)((x)-0.5); +inline int round(double x) +{ + return (x) >= 0 ? (int)((x) + 0.5) : (int)((x)-0.5); } -template -inline T absdiff(T c1, T c2) { - return (c2>c1?c2-c1:c1-c2); +template +inline T absdiff(T c1, T c2) +{ + return (c2 > c1 ? c2 - c1 : c1 - c2); } //#define SHOW_DEBUG @@ -383,94 +320,127 @@ inline T absdiff(T c1, T c2) { #endif // TODO: This should be in LabelingCvSeq ??? -void FitLineGray(CvMat *line_data, float params[4], cv::Mat *gray) { - // this very simple approach works... - /* - float *cx = &(params[2]); - float *cy = &(params[3]); - float *sx = &(params[0]); - float *sy = &(params[1]); - CvPoint2D32f *p1 = (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, 0, sizeof(CvPoint2D32f)); - CvPoint2D32f *p2 = (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, line_data->cols-1, sizeof(CvPoint2D32f)); - *cx = p1->x; *cy = p1->y; - *sx = p2->x - p1->x; *sy = p2->y - p1->y; - return; - */ +void FitLineGray(cv::Mat& line_data, float params[4], cv::Mat& gray) +{ + // this very simple approach works... 
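+  // In outline: each sampled edge point is nudged along the line normal,
+  // with the shift weighted by the gray-level difference across the edge,
+  // so the subsequent cv::fitLine works on sub-pixel-refined points.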
+ /* + float *cx = &(params[2]); + float *cy = &(params[3]); + float *sx = &(params[0]); + float *sy = &(params[1]); + CvPoint2D32f *p1 = (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, 0, + sizeof(CvPoint2D32f)); CvPoint2D32f *p2 = + (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, line_data->cols-1, + sizeof(CvPoint2D32f)); *cx = p1->x; *cy = p1->y; *sx = p2->x - p1->x; *sy = + p2->y - p1->y; return; + */ #ifdef SHOW_DEBUG - cv::Mat *tmp = cvCreateImage(cvSize(gray->rows, gray->cols), IPL_DEPTH_8U, 3); - cv::Mat *tmp2 = cvCreateImage(cvSize(gray->rows*5, gray->cols*5), IPL_DEPTH_8U, 3); - cvCvtColor(gray, tmp, cv::COLOR_RGB2GRAY); - cvResize(tmp, tmp2, CV_INTER_NN); + IplImage* tmp = + cvCreateImage(cvSize(gray->width, gray->height), IPL_DEPTH_8U, 3); + IplImage* tmp2 = + cvCreateImage(cvSize(gray->width * 5, gray->height * 5), IPL_DEPTH_8U, 3); + cvCvtColor(gray, tmp, CV_GRAY2RGB); + cvResize(tmp, tmp2, CV_INTER_NN); #endif - // Discover 1st the line normal direction - CvPoint2D32f *p1 = (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, 0, sizeof(CvPoint2D32f)); - CvPoint2D32f *p2 = (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, line_data->cols-1, sizeof(CvPoint2D32f)); - double dx = +(p2->y - p1->y); - double dy = -(p2->x - p1->x); - if ((dx == 0) && (dy == 0)) return; - else if (dx == 0) { dy /= dy; } - else if (dy == 0) { dx /= dx; } - else if (abs(dx) > abs(dy)) { dy /= dx; dx /= dx; } - else { dx /= dy; dy /= dy; } - - // Build normal search table - const int win_size=5; - const int win_mid=win_size/2; - const int diff_win_size=win_size-1; - double xx[win_size], yy[win_size]; - double dxx[diff_win_size], dyy[diff_win_size]; - xx[win_mid] = 0; yy[win_mid] = 0; - for (int i=1; i<=win_size/2; i++) { - xx[win_mid + i] = round(i*dx); - xx[win_mid - i] = -xx[win_mid + i]; - yy[win_mid + i] = round(i*dy); - yy[win_mid - i] = -yy[win_mid + i]; - } - for (int i=0; icols; l++) { - // CvPoint2D32f *p = (CvPoint2D32f*)CV_MAT_ELEM_PTR_FAST(*line_data, 0, l, sizeof(CvPoint2D32f)); - - // double dx=0, dy=0, ww=0; - // for (int i=0; iimageData[int((p->y+yy[i])*temp2->widthStep+(p->x+xx[i]))]; - // unsigned char c2 = (unsigned char)temp2->imageData[int((p->y+yy[i+1])*temp2->widthStep+(p->x+xx[i+1]))]; + // Discover 1st the line normal direction + auto p1 = line_data.ptr(0, 0); + auto p2 = line_data.ptr(0, line_data.cols - 1); + double dx = +(p2->y - p1->y); + double dy = -(p2->x - p1->x); + if ((dx == 0) && (dy == 0)) + return; + else if (dx == 0) + { + dy /= dy; + } + else if (dy == 0) + { + dx /= dx; + } + else if (abs(dx) > abs(dy)) + { + dy /= dx; + dx /= dx; + } + else + { + dx /= dy; + dy /= dy; + } + + // Build normal search table + const int win_size = 5; + const int win_mid = win_size / 2; + const int diff_win_size = win_size - 1; + double xx[win_size], yy[win_size]; + double dxx[diff_win_size], dyy[diff_win_size]; + xx[win_mid] = 0; + yy[win_mid] = 0; + for (int i = 1; i <= win_size / 2; i++) + { + xx[win_mid + i] = round(i * dx); + xx[win_mid - i] = -xx[win_mid + i]; + yy[win_mid + i] = round(i * dy); + yy[win_mid - i] = -yy[win_mid + i]; + } + for (int i = 0; i < diff_win_size; i++) + { + dxx[i] = (xx[i] + xx[i + 1]) / 2; + dyy[i] = (yy[i] + yy[i + 1]) / 2; + } + + // Adjust the points + for (int l = 0; l < line_data.cols; l++) + { + auto p = line_data.ptr(0, l); + + double dx = 0, dy = 0, ww = 0; + for (int i = 0; i < diff_win_size; i++) + { + unsigned char c1 = (unsigned char)gray.at( + int((p->y + yy[i]) * gray.cols + (p->x + xx[i]))); + unsigned char c2 = (unsigned 
char)gray.at( + int((p->y + yy[i + 1]) * gray.cols + (p->x + xx[i + 1]))); #ifdef SHOW_DEBUG - cv::circle(tmp2, cvPoint((p->x+xx[i])*5+2,(p->y+yy[i])*5+2), 0, CV_RGB(0,0,255)); - cv::circle(tmp2, cvPoint((p->x+xx[i+1])*5+2,(p->y+yy[i+1])*5+2), 0, CV_RGB(0,0,255)); + cvCircle(tmp2, cv::Point((p->x + xx[i]) * 5 + 2, (p->y + yy[i]) * 5 + 2), + 0, CV_RGB(0, 0, 255)); + cvCircle( + tmp2, + cv::Point((p->x + xx[i + 1]) * 5 + 2, (p->y + yy[i + 1]) * 5 + 2), 0, + CV_RGB(0, 0, 255)); #endif - // double w = absdiff(c1, c2); - // dx += dxx[i]*w; - // dy += dyy[i]*w; - // ww += w; - // } - // if (ww > 0) { - // dx /= ww; dy /= ww; - // } + double w = absdiff(c1, c2); + dx += dxx[i] * w; + dy += dyy[i] * w; + ww += w; + } + if (ww > 0) + { + dx /= ww; + dy /= ww; + } #ifdef SHOW_DEBUG - cvLine(tmp2, cvPoint(p->x*5+2,p->y*5+2), cvPoint((p->x+dx)*5+2, (p->y+dy)*5+2), CV_RGB(0,255,0)); - p->x += float(dx); p->y += float(dy); - cv::circle(tmp2, cvPoint(p->x*5+2,p->y*5+2), 0, CV_RGB(255,0,0)); + cvLine(tmp2, cv::Point(p->x * 5 + 2, p->y * 5 + 2), + cv::Point((p->x + dx) * 5 + 2, (p->y + dy) * 5 + 2), + CV_RGB(0, 255, 0)); + p->x += float(dx); + p->y += float(dy); + cvCircle(tmp2, cv::Point(p->x * 5 + 2, p->y * 5 + 2), 0, CV_RGB(255, 0, 0)); #else - // p->x += float(dx); p->y += float(dy); + p->x += float(dx); + p->y += float(dy); #endif - + } #ifdef SHOW_DEBUG - cvNamedWindow("tmp"); - cvShowImage("tmp",tmp2); - cvWaitKey(0); - cvReleaseImage(&tmp); - cvReleaseImage(&tmp2); + cvNamedWindow("tmp"); + cvShowImage("tmp", tmp2); + cvWaitKey(0); + cvReleaseImage(&tmp); + cvReleaseImage(&tmp2); #endif } -} // namespace alvar +} // namespace alvar \ No newline at end of file diff --git a/ar_track_alvar/src/Draw.cpp b/ar_track_alvar/src/Draw.cpp index 50b61fa..515c770 100644 --- a/ar_track_alvar/src/Draw.cpp +++ b/ar_track_alvar/src/Draw.cpp @@ -22,272 +22,281 @@ */ #include "ar_track_alvar/Draw.h" -#include using namespace std; -namespace alvar { -using namespace std; - -void DrawPoints(cv::Mat *image, const vector& points, cv::Scalar color) +namespace alvar { - for(unsigned i = 0; i < points.size(); ++i) - cv::line(*image, cv::Point(points[i].x,points[i].y), cv::Point(points[i].x,points[i].y), color); -} +using namespace std; -void DrawLine(cv::Mat* image, const Line line, cv::Scalar color) +void DrawLine(cv::Mat& image, const Line line, const cv::Scalar& color) { - double len = 100; - cv::Point p1, p2; - p1.x = int(line.c.x); p1.y = int(line.c.y); - p2.x = int(line.c.x+line.s.x*len); p2.y = int(line.c.y+line.s.y*len); - cv::line(*image, p1, p2, color); + double len = 100; + cv::Point p1, p2; + p1.x = int(line.c.x); + p1.y = int(line.c.y); + p2.x = int(line.c.x + line.s.x * len); + p2.y = int(line.c.y + line.s.y * len); + cv::line(image, p1, p2, color); - p1.x = int(line.c.x); p1.y = int(line.c.y); - p2.x = int(line.c.x-line.s.x*len); p2.y = int(line.c.y-line.s.y*len); - cv::line(*image, p1, p2, color); + p1.x = int(line.c.x); + p1.y = int(line.c.y); + p2.x = int(line.c.x - line.s.x * len); + p2.y = int(line.c.y - line.s.y * len); + cv::line(image, p1, p2, color); } -void DrawPoints(cv::Mat* image, const CvSeq* contour, cv::Scalar color) +void DrawPoints(cv::Mat& image, const std::vector& contour, + const cv::Scalar& color) { - for(int i = 0; i < contour->total; ++i) - { - CvPoint* pt = (CvPoint*)cv::getSeqElem( contour, i); - cv::line(*image, cvPoint(pt->x, pt->y), cv::Point(pt->x, pt->y), color); - } + for (const auto& pt : contour) + { + cv::line(image, cv::Point(pt.x, pt.y), cv::Point(pt.x, pt.y), color); + } 
} -void DrawCircles(cv::Mat* image, const CvSeq* contour, int radius, cv::Scalar color) +void DrawCircles(cv::Mat& image, const std::vector& contour, + int radius, cv::Scalar color) { - for(int i = 0; i < contour->total; ++i) - { - CvPoint* pt = (CvPoint*)cv::getSeqElem( contour, i); - cv::circle(*image, cvPoint(pt->x, pt->y), radius, color); - } + for (const auto& pt : contour) + { + cv::circle(image, cv::Point(pt.x, pt.y), radius, color); + } } -void DrawLines(cv::Mat* image, const CvSeq* contour, cv::Scalar color) +void DrawLines(cv::Mat& image, const std::vector& contour, + cv::Scalar color) { - if(contour->total >= 2) - { - for(int i = 0; i < contour->total; ++i) - { - cv::Point* pt1 = (cv::Point*)cv::getSeqElem( contour, i); - cv::Point* pt2 = (cv::Point*)cv::getSeqElem( contour, (i+1)%(contour->total)); - cv::line(*image, cv::Point(pt1->x, pt1->y), cv::Point(pt2->x, pt2->y), color); - } - } + if (contour.size() >= 2) + { + for (int i = 0; i < contour.size(); ++i) + { + const auto& pt1 = contour[i]; + const auto& pt2 = contour[(i + 1) % (contour.size())]; + cv::line(image, cv::Point(pt1.x, pt1.y), cv::Point(pt2.x, pt2.y), color); + } + } } -void DrawCVEllipse(cv::Mat* image, CvBox2D& ellipse, cv::Scalar color, bool fill/*=false*/, double par) +void DrawCVEllipse(cv::Mat& image, const cv::RotatedRect& ellipse, + cv::Scalar color, bool fill /*=false*/, double par) { - cv::Point center; - center.x = static_cast(ellipse.center.x); - center.y = static_cast(ellipse.center.y); - int type = 1; - if(fill) - type = cv::FILLED; + cv::Point center; + center.x = static_cast(ellipse.center.x); + center.y = static_cast(ellipse.center.y); + int type = 1; + if (fill) + type = cv::FILLED; - //cout<(par+ellipse.size.width/2), static_cast(par+ellipse.size.height/2)), -ellipse.angle, 0, 360, color, type); + // cout<(par + ellipse.size.width / 2), + static_cast(par + ellipse.size.height / 2)), + -ellipse.angle, 0, 360, color, type); } -void BuildHideTexture(CvMat *image, CvMat *hide_texture, - Camera *cam, double gl_modelview[16], - PointDouble topleft, PointDouble botright) +void BuildHideTexture(cv::Mat& image, cv::Mat& hide_texture, Camera* cam, + double gl_modelview[16], PointDouble topleft, + PointDouble botright) { + double kx = 1.0; + double ky = 1.0; + + double width = abs(botright.x - topleft.x); + double height = abs(botright.y - topleft.y); - - //assert(image->origin == 0); // Currently only top-left origin supported - double kx=1.0; - double ky=1.0; - - double width = abs(botright.x - topleft.x); - double height = abs(botright.y - topleft.y); - - //GLint vp[4]; //viewport - //GLdouble winx[8]; // point's coordinates in windowcoordinates - //GLdouble winy[8]; - //GLdouble winz[8]; - double objx; - double objy; - //GLdouble objz; - unsigned char pixels[8][3]; - unsigned char color[3]={0,0,0}; + // GLint vp[4]; //viewport + // GLdouble winx[8]; // point's coordinates in windowcoordinates + // GLdouble winy[8]; + // GLdouble winz[8]; + double objx; + double objy; + // GLdouble objz; + unsigned char pixels[8][3]; + unsigned char color[3] = { 0, 0, 0 }; - int i=0,j=0,t=0; - double ox,oy,ya,yb,xc,xd,offset; - double sizex = width/4, size2x=width/2; - double sizey = height/4, size2y=height/2; + int i = 0, j = 0, t = 0; + double ox, oy, ya, yb, xc, xd, offset; + double sizex = width / 4, size2x = width / 2; + double sizey = height / 4, size2y = height / 2; - // Calculate extended coordinates of detected marker (+ border) - objx = width/2*kx; - objy = height/2*ky; + // Calculate extended coordinates of 
detected marker (+ border) + objx = width / 2 * kx; + objy = height / 2 * ky; - //cout<width<<","<height<width,ystep=2*objy/hide_texture->height; - for(i=0;iwidth;i++){ - ox = -objx+i*xstep; - offset = fmod((objx-ox), size2x); - if( offset < sizex) - xc = objx + offset; - else - xc = objx+size2x-offset; - offset = fmod((objx+ox), size2x); - if( offset < sizex) - xd = -objx - offset; - else - xd = -objx-size2x+offset; - r=(ox+objx); - for(j=0;jheight;j++){ - oy = -objy+j*ystep; - offset = fmod((objy-oy), size2y); - if( offset < sizey) - ya = objy + offset; - else - ya = objy+size2y-offset; - offset = fmod((oy+objy), size2y); - if( offset < sizey) - yb = -objy - offset; - else - yb = -objy-size2y+offset; - s=(oy+objy); + // cout<width<<","<height<ProjectPoints(&points3d_mat, gl_modelview, &points2d_mat); - int kuvanx4 = (int)Limit(points2d[0][0], 0, image->width-1); int kuvany4 = (int)Limit(points2d[0][1], 0, image->height-1); - int kuvanx5 = (int)Limit(points2d[1][0], 0, image->width-1); int kuvany5 = (int)Limit(points2d[1][1], 0, image->height-1); - int kuvanx6 = (int)Limit(points2d[2][0], 0, image->width-1); int kuvany6 = (int)Limit(points2d[2][1], 0, image->height-1); - int kuvanx7 = (int)Limit(points2d[3][0], 0, image->width-1); int kuvany7 = (int)Limit(points2d[3][1], 0, image->height-1); + double l2r = 2 * width * kx; + double l2s = 2 * height * ky; + double lr = width * kx; + double ls = height * ky; + double r, s; + double xstep = 2 * objx / hide_texture.cols, + ystep = 2 * objy / hide_texture.rows; + for (i = 0; i < hide_texture.cols; i++) + { + ox = -objx + i * xstep; + offset = fmod((objx - ox), size2x); + if (offset < sizex) + xc = objx + offset; + else + xc = objx + size2x - offset; + offset = fmod((objx + ox), size2x); + if (offset < sizex) + xd = -objx - offset; + else + xd = -objx - size2x + offset; + r = (ox + objx); + for (j = 0; j < hide_texture.rows; j++) + { + oy = -objy + j * ystep; + offset = fmod((objy - oy), size2y); + if (offset < sizey) + ya = objy + offset; + else + ya = objy + size2y - offset; + offset = fmod((oy + objy), size2y); + if (offset < sizey) + yb = -objy - offset; + else + yb = -objy - size2y + offset; + s = (oy + objy); - pixels[4][0] = (unsigned char)cvGet2D(image, kuvany4, kuvanx4).val[0]; - pixels[4][1] = (unsigned char)cvGet2D(image, kuvany4, kuvanx4).val[1]; - pixels[4][2] = (unsigned char)cvGet2D(image, kuvany4, kuvanx4).val[2]; - pixels[5][0] = (unsigned char)cvGet2D(image, kuvany5, kuvanx5).val[0]; - pixels[5][1] = (unsigned char)cvGet2D(image, kuvany5, kuvanx5).val[1]; - pixels[5][2] = (unsigned char)cvGet2D(image, kuvany5, kuvanx5).val[2]; - pixels[6][0] = (unsigned char)cvGet2D(image, kuvany6, kuvanx6).val[0]; - pixels[6][1] = (unsigned char)cvGet2D(image, kuvany6, kuvanx6).val[1]; - pixels[6][2] = (unsigned char)cvGet2D(image, kuvany6, kuvanx6).val[2]; - pixels[7][0] = (unsigned char)cvGet2D(image, kuvany7, kuvanx7).val[0]; - pixels[7][1] = (unsigned char)cvGet2D(image, kuvany7, kuvanx7).val[1]; - pixels[7][2] = (unsigned char)cvGet2D(image, kuvany7, kuvanx7).val[2]; + double points3d[4][3] = { + ox, ya, 0, ox, yb, 0, xc, oy, 0, xd, oy, 0, + }; + double points2d[4][2]; + cv::Mat points3d_mat, points2d_mat; + points3d_mat = cv::Mat(4, 3, CV_64F, points3d); + points2d_mat = cv::Mat(4, 2, CV_64F, points2d); - // make the borders of the texture partly transparent - int opaque; - const int w=1; - if((ihide_texture->width-w)|(j>hide_texture->width-w)) - opaque=60; - else if 
((i<2*w)|(j<2*w)|(i>hide_texture->width-2*w)|(j>hide_texture->width-2*w)) - opaque=100; - else if ((i<3*w)|(j<3*w)|(i>hide_texture->width-3*w)|(j>hide_texture->width-3*w)) - opaque=140; - else if ((i<4*w)|(j<4*w)|(i>hide_texture->width-4*w)|(j>hide_texture->width-4*w)) - opaque=200; - else - opaque=255; - - cvSet2D(hide_texture, j, i, cvScalar( - (((lr-r)*pixels[7][0] + r*pixels[6][0]+ s* pixels[4][0] + (ls-s)* pixels[5][0])/l2r), - (((lr-r)*pixels[7][1] + r*pixels[6][1]+ s* pixels[4][1] + (ls-s)* pixels[5][1])/l2r), - (((lr-r)*pixels[7][2] + r*pixels[6][2]+ s* pixels[4][2] + (ls-s)* pixels[5][2])/l2r), - opaque - )); - } - } + + CvMat temp = cvMat(points3d_mat); + CvMat temp2 = cvMat(points2d_mat); + + cam->ProjectPoints(&temp, gl_modelview, &temp2); + int kuvanx4 = (int)Limit(points2d[0][0], 0, image.cols - 1); + int kuvany4 = (int)Limit(points2d[0][1], 0, image.rows - 1); + int kuvanx5 = (int)Limit(points2d[1][0], 0, image.cols - 1); + int kuvany5 = (int)Limit(points2d[1][1], 0, image.rows - 1); + int kuvanx6 = (int)Limit(points2d[2][0], 0, image.cols - 1); + int kuvany6 = (int)Limit(points2d[2][1], 0, image.rows - 1); + int kuvanx7 = (int)Limit(points2d[3][0], 0, image.cols - 1); + int kuvany7 = (int)Limit(points2d[3][1], 0, image.rows - 1); + + pixels[4][0] = (unsigned char)image.at(kuvany4, kuvanx4)[0]; + pixels[4][1] = (unsigned char)image.at(kuvany4, kuvanx4)[1]; + pixels[4][2] = (unsigned char)image.at(kuvany4, kuvanx4)[2]; + pixels[5][0] = (unsigned char)image.at(kuvany5, kuvanx5)[0]; + pixels[5][1] = (unsigned char)image.at(kuvany5, kuvanx5)[1]; + pixels[5][2] = (unsigned char)image.at(kuvany5, kuvanx5)[2]; + pixels[6][0] = (unsigned char)image.at(kuvany6, kuvanx6)[0]; + pixels[6][1] = (unsigned char)image.at(kuvany6, kuvanx6)[1]; + pixels[6][2] = (unsigned char)image.at(kuvany6, kuvanx6)[2]; + pixels[7][0] = (unsigned char)image.at(kuvany7, kuvanx7)[0]; + pixels[7][1] = (unsigned char)image.at(kuvany7, kuvanx7)[1]; + pixels[7][2] = (unsigned char)image.at(kuvany7, kuvanx7)[2]; + + // make the borders of the texture partly transparent + int opaque; + const int w = 1; + if ((i < w) | (j < w) | (i > hide_texture.cols - w) | + (j > hide_texture.cols - w)) + opaque = 60; + else if ((i < 2 * w) | (j < 2 * w) | (i > hide_texture.cols - 2 * w) | + (j > hide_texture.cols - 2 * w)) + opaque = 100; + else if ((i < 3 * w) | (j < 3 * w) | (i > hide_texture.cols - 3 * w) | + (j > hide_texture.cols - 3 * w)) + opaque = 140; + else if ((i < 4 * w) | (j < 4 * w) | (i > hide_texture.cols - 4 * w) | + (j > hide_texture.cols - 4 * w)) + opaque = 200; + else + opaque = 255; + hide_texture.at(j, i) = + cv::Vec4b((((lr - r) * pixels[7][0] + r * pixels[6][0] + + s * pixels[4][0] + (ls - s) * pixels[5][0]) / + l2r), + (((lr - r) * pixels[7][1] + r * pixels[6][1] + + s * pixels[4][1] + (ls - s) * pixels[5][1]) / + l2r), + (((lr - r) * pixels[7][2] + r * pixels[6][2] + + s * pixels[4][2] + (ls - s) * pixels[5][2]) / + l2r), + opaque); + } + } } -void DrawTexture(cv::Mat *image, cv::Mat *texture, - Camera *cam, double gl_modelview[16], - PointDouble topleft, PointDouble botright) +void DrawTexture(cv::Mat& image, cv::Mat& texture, Camera* cam, + double gl_modelview[16], PointDouble topleft, + PointDouble botright) { - //assert(image->origin == 0); // Currently only top-left origin supported - double width = abs(botright.x - topleft.x); - double height = abs(botright.y - topleft.y); - double objx = width/2; - double objy = height/2; - - + double width = abs(botright.x - topleft.x); + double height = 
abs(botright.y - topleft.y); + double objx = width / 2; + double objy = height / 2; - // Project corners - double points3d[4][3] = { - -objx, -objy, 0, - -objx, objy, 0, - objx, objy, 0, - objx, -objy, 0, - }; - double points2d[4][2]; - CvMat points3d_mat, points2d_mat; - cvInitMatHeader(&points3d_mat, 4, 3, CV_64F, points3d); - cvInitMatHeader(&points2d_mat, 4, 2, CV_64F, points2d); - cam->ProjectPoints(&points3d_mat, gl_modelview, &points2d_mat); - - // Warp texture and mask using the perspective that is based on the corners - double map[9]; - cv::Mat map_mat = cv::Mat(3, 3, CV_64F, map); - cv::Point2f src[4] = { - { 0, 0 }, - { 0, float(texture->cols-1) }, - { float(texture->rows-1), float(texture->cols-1) }, - { float(texture->rows-1), 0 }, - }; - cv::Point2f dst[4] = { - { float(points2d[0][0]), float(points2d[0][1]) }, - { float(points2d[1][0]), float(points2d[1][1]) }, - { float(points2d[2][0]), float(points2d[2][1]) }, - { float(points2d[3][0]), float(points2d[3][1]) }, - }; + // Project corners + double points3d[4][3] = { + -objx, -objy, 0, -objx, objy, 0, objx, objy, 0, objx, -objy, 0, + }; + double points2d[4][2]; + cv::Mat points3d_mat, points2d_mat; + points3d_mat = cv::Mat(4, 3, CV_64F, points3d); + points2d_mat = cv::Mat(4, 2, CV_64F, points2d); - map_mat = cv::getPerspectiveTransform(src, dst); - cv::Mat img = image->clone(); - cv::Mat img2 = image->clone(); - cv::Mat mask = cv::Mat::zeros(image->rows, image->cols, CV_8U); - cv::Mat mask2 = cv::Mat::zeros(image->rows, image->cols, CV_8U); - // cvZero(img); - // cvZero(img2); - // cvZero(mask); - // cvZero(mask2); + CvMat temp = cvMat(points3d_mat); + CvMat temp2 = cvMat(points2d_mat); - CvMat texture2 = cvMat(*texture); - CvMat mask3 = cvMat(mask); - CvMat img3 = cvMat(img); - for (int j=0; jcols; j++) { //ttesis: why must we copy the texture first? - for (int i=0; irows; i++) { - CvScalar s = cvGet2D(&texture2, j, i); - cvSet2D(&img3, j, i, s); - // img.at(j,i) = s; - // //image.at - if ((i>0) && (j>0) && (i<(texture->rows-1)) && (j<(texture->cols-1))) - cvSet2D(&mask3, j, i, cvScalar(1)); //ttesis: why are edges not included? - } - } - cv::warpPerspective(cv::cvarrToMat(&img3), img2, map_mat,cv::cvarrToMat(&img3).size()); - cv::warpPerspective(cv::cvarrToMat(&mask3), mask2, map_mat,cv::cvarrToMat(&mask3).size());//, cv::InterpolationFlags::INTER_LINEAR , cv::BORDER_CONSTANT, cv::Scalar(0)); - + cam->ProjectPoints(&temp, gl_modelview, &temp2); + + //cam->ProjectPoints(points3d_mat, gl_modelview, points2d_mat); - img2.copyTo(*image,mask2); + // Warp texture and mask using the perspective that is based on the corners + double map[9]; + cv::Mat map_mat = cv::Mat(3, 3, CV_64F, map); + cv::Point2f src[4] = { + { 0, 0 }, + { 0, float(texture.rows - 1) }, + { float(texture.cols - 1), float(texture.rows - 1) }, + { float(texture.cols - 1), 0 }, + }; + cv::Point2f dst[4] = { + { float(points2d[0][0]), float(points2d[0][1]) }, + { float(points2d[1][0]), float(points2d[1][1]) }, + { float(points2d[2][0]), float(points2d[2][1]) }, + { float(points2d[3][0]), float(points2d[3][1]) }, + }; + map_mat = cv::getPerspectiveTransform(src, dst); + cv::Mat img = image.clone(); + cv::Mat img2 = image.clone(); + cv::Mat mask = cv::Mat::zeros(cv::Size(image.cols, image.rows), CV_8UC1); + cv::Mat mask2 = cv::Mat::zeros(cv::Size(image.cols, image.rows), CV_8UC1); + img.setTo(cv::Scalar::all(0)); + img2.setTo(cv::Scalar::all(0)); + for (int j = 0; j < texture.rows; j++) + { // ttesis: why must we copy the texture first? 
+ for (int i = 0; i < texture.cols; i++) + { + cv::Vec3b s = texture.at(j, i); + img.at(j, i) = s; + if ((i > 0) && (j > 0) && (i < (texture.cols - 1)) && + (j < (texture.rows - 1))) + mask.at(j, i) = 1; // ttesis: why are edges not included? + } + } + cv::warpPerspective(img, img2, map_mat, img2.size()); + cv::warpPerspective(mask, mask2, map_mat, img2.size(), 0); - //cvCopy(CvMat(img2), image, mask2); + img2.copyTo(image, mask2); - // cvReleaseImage(&img); - // cvReleaseImage(&img2); - // cvReleaseImage(&mask); - // cvReleaseImage(&mask2); + img.release(); + img2.release(); + mask.release(); + mask2.release(); } -} // namespace alvar +} // namespace alvar \ No newline at end of file diff --git a/ar_track_alvar/src/MarkerDetector.cpp b/ar_track_alvar/src/MarkerDetector.cpp index d7f02db..0052812 100644 --- a/ar_track_alvar/src/MarkerDetector.cpp +++ b/ar_track_alvar/src/MarkerDetector.cpp @@ -102,9 +102,12 @@ namespace alvar { } labeling->SetCamera(cam); - labeling->LabelSquares(image, visualize); + labeling->LabelSquares(*image, visualize); vector >& blob_corners = labeling->blob_corners; - cv::Mat* gray = labeling->gray; + + cv::Mat temp = labeling->gray; + + cv::Mat* gray = &temp; int orientation; diff --git a/ar_track_alvar/test/test_ar_legacy.py b/ar_track_alvar/test/test_ar_legacy.py index 59c5389..3cb7382 100644 --- a/ar_track_alvar/test/test_ar_legacy.py +++ b/ar_track_alvar/test/test_ar_legacy.py @@ -1,27 +1,4 @@ -# import os -# import sys -# import time -# import unittest - -# from ament_index_python.packages import get_package_share_directory -# from launch import LaunchDescription -# from launch_ros.actions import Node -# import launch_testing -# import launch - -# import pytest -# import rclpy -# import numpy -# import math - -# from geometry_msgs.msg import Pose - from rclpy.duration import Duration -# from rclpy.node import Node - - -# from sensor_msgs.msg import Image -# from ar_track_alvar_msgs.msg import AlvarMarkers import os import sys @@ -53,59 +30,12 @@ max_frequency="100" max_track_error="0.2" output_frame="camera" - -# @pytest.mark.rostest -# def generate_test_description(): -# return LaunchDescription([ -# # DisparityNode - -# launch.actions.ExecuteProcess( -# cmd=['ros2', 'bag', 'play', '-s',"rosbag_v2",bag_name], -# output='screen' -# ), - -# Node( -# package='ar_track_alvar', -# executable='individualMarkersNoKinect', -# name='individual_markers', -# remappings=[ -# ("camera_image", cam_image_topic), -# ("camera_info",cam_info_topic) -# ], -# arguments=[marker_size, max_new_marker_error, max_track_error, cam_image_topic, cam_info_topic, output_frame], -# output='screen' -# ), -# launch_testing.actions.ReadyToTest(), -# ]) - -# class TestArTrack(unittest.TestCase): -# @classmethod -# def setUpClass(cls): -# rclpy.init() -# cls.node = rclpy.create_node('test_ar_track_alvar_legacy') - - -# @classmethod -# def tearDownClass(cls): -# cls.node.destroy_node() -# rclpy.shutdown() - - -# def test_markers(self): -# ''' -# Aiming the real output taken from a bag file. 
-# ''' -# tfBuffer = Buffer() -# self.tflistener = TransformListener(tfBuffer, self) - -# # Wait up to 60 seconds to receive message -# start_time = time.time() @pytest.mark.rostest def generate_test_description(): return LaunchDescription([ - # DisparityNode + # Bag Node launch.actions.ExecuteProcess( cmd=['ros2', 'bag', 'play', '-s',"rosbag_v2",bag_name], From 544d7cd6ee7854ecea638d2a2aa0a5bba54cc37e Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Fri, 30 Jul 2021 16:09:48 -0500 Subject: [PATCH 18/27] Figured out segfault --- ar_track_alvar/CMakeLists.txt | 58 +- ar_track_alvar/include/ar_track_alvar/Alvar.h | 155 +- .../include/ar_track_alvar/AlvarException.h | 10 +- .../include/ar_track_alvar/Bitset.h | 187 +- .../include/ar_track_alvar/Camera.h | 491 ++-- .../include/ar_track_alvar/Capture.h | 290 +- .../include/ar_track_alvar/CaptureDevice.h | 72 +- .../include/ar_track_alvar/CaptureFactory.h | 172 +- .../ar_track_alvar/CaptureFactory_private.h | 44 +- .../include/ar_track_alvar/CapturePlugin.h | 72 +- .../ar_track_alvar/ConnectedComponents.h | 2 +- .../include/ar_track_alvar/Container3d.h | 408 +-- .../include/ar_track_alvar/CvTestbed.h | 239 +- .../ar_track_alvar/DirectoryIterator.h | 69 +- .../DirectoryIterator_private.h | 26 +- ar_track_alvar/include/ar_track_alvar/Draw.h | 2 +- ar_track_alvar/include/ar_track_alvar/EC.h | 1545 ++++++----- .../ar_track_alvar/FernImageDetector.h | 100 +- .../ar_track_alvar/FernPoseEstimator.h | 37 +- .../include/ar_track_alvar/FileFormat.h | 69 +- .../include/ar_track_alvar/FileFormatUtils.h | 92 +- .../include/ar_track_alvar/Filter.h | 279 +- .../include/ar_track_alvar/GlutViewer.h | 82 +- .../include/ar_track_alvar/IntegralImage.h | 213 +- .../include/ar_track_alvar/Kalman.h | 524 ++-- ar_track_alvar/include/ar_track_alvar/Line.h | 65 +- ar_track_alvar/include/ar_track_alvar/Lock.h | 41 +- .../include/ar_track_alvar/Marker.h | 636 +++-- .../include/ar_track_alvar/MarkerDetector.h | 238 +- .../include/ar_track_alvar/MultiMarker.h | 335 ++- .../ar_track_alvar/MultiMarkerBundle.h | 118 +- .../ar_track_alvar/MultiMarkerFiltered.h | 84 +- .../ar_track_alvar/MultiMarkerInitializer.h | 171 +- ar_track_alvar/include/ar_track_alvar/Mutex.h | 46 +- .../include/ar_track_alvar/Mutex_private.h | 16 +- .../include/ar_track_alvar/Optimization.h | 195 +- .../include/ar_track_alvar/Platform.h | 14 +- .../include/ar_track_alvar/Plugin.h | 87 +- .../include/ar_track_alvar/Plugin_private.h | 18 +- ar_track_alvar/include/ar_track_alvar/Pose.h | 150 +- .../include/ar_track_alvar/Ransac.h | 712 ++--- .../include/ar_track_alvar/Rotation.h | 365 +-- ar_track_alvar/include/ar_track_alvar/SfM.h | 179 +- .../include/ar_track_alvar/Shared.h | 63 +- .../include/ar_track_alvar/Threads.h | 38 +- .../include/ar_track_alvar/Threads_private.h | 14 +- ar_track_alvar/include/ar_track_alvar/Timer.h | 40 +- .../include/ar_track_alvar/Timer_private.h | 16 +- .../include/ar_track_alvar/Tracker.h | 27 +- .../include/ar_track_alvar/TrackerFeatures.h | 163 +- .../ar_track_alvar/TrackerOrientation.h | 147 +- .../include/ar_track_alvar/TrackerPsa.h | 76 +- .../include/ar_track_alvar/TrackerStat.h | 74 +- .../include/ar_track_alvar/TrifocalTensor.h | 212 +- .../include/ar_track_alvar/Uncopyable.h | 46 +- .../include/ar_track_alvar/UnscentedKalman.h | 466 ++-- ar_track_alvar/include/ar_track_alvar/Util.h | 622 +++-- .../ar_track_alvar/filter/kinect_filtering.h | 46 +- .../ar_track_alvar/filter/medianFilter.h | 24 +- ar_track_alvar/nodes/FindMarkerBundles.cpp | 2 +- 
.../nodes/FindMarkerBundlesNoKinect.cpp | 6 +- ar_track_alvar/nodes/IndividualMarkers.cpp | 157 +- .../nodes/IndividualMarkersNoKinect.cpp | 36 +- ar_track_alvar/nodes/TrainMarkerBundle.cpp | 6 +- ar_track_alvar/src/Alvar.cpp | 30 +- ar_track_alvar/src/Bitset.cpp | 604 +++-- ar_track_alvar/src/Camera.cpp | 1698 ++++++------ ar_track_alvar/src/CaptureDevice.cpp | 26 +- ar_track_alvar/src/CaptureFactory.cpp | 405 +-- ar_track_alvar/src/CaptureFactory_unix.cpp | 103 +- ar_track_alvar/src/CaptureFactory_win.cpp | 122 +- ar_track_alvar/src/ConnectedComponents.cpp | 4 +- ar_track_alvar/src/CvTestbed.cpp | 422 +-- ar_track_alvar/src/DirectoryIterator.cpp | 20 +- ar_track_alvar/src/DirectoryIterator_unix.cpp | 125 +- ar_track_alvar/src/DirectoryIterator_win.cpp | 114 +- ar_track_alvar/src/Draw.cpp | 17 +- ar_track_alvar/src/EC.cpp | 459 ++-- ar_track_alvar/src/FernImageDetector.cpp | 854 +++--- ar_track_alvar/src/FernPoseEstimator.cpp | 58 +- ar_track_alvar/src/FileFormatUtils.cpp | 193 +- ar_track_alvar/src/Filter.cpp | 153 +- ar_track_alvar/src/GlutViewer.cpp | 721 ++--- ar_track_alvar/src/IntegralImage.cpp | 345 +-- ar_track_alvar/src/Kalman.cpp | 951 ++++--- ar_track_alvar/src/Line.cpp | 158 +- ar_track_alvar/src/Marker.cpp | 2393 ++++++++++------- ar_track_alvar/src/MarkerDetector.cpp | 385 +-- ar_track_alvar/src/MultiMarker.cpp | 637 +++-- ar_track_alvar/src/MultiMarkerBundle.cpp | 457 ++-- ar_track_alvar/src/MultiMarkerFiltered.cpp | 126 +- ar_track_alvar/src/MultiMarkerInitializer.cpp | 262 +- ar_track_alvar/src/Mutex.cpp | 15 +- ar_track_alvar/src/Mutex_unix.cpp | 28 +- ar_track_alvar/src/Mutex_win.cpp | 28 +- ar_track_alvar/src/Optimization.cpp | 514 ++-- ar_track_alvar/src/Platform.cpp | 70 +- ar_track_alvar/src/Plugin.cpp | 43 +- ar_track_alvar/src/Plugin_unix.cpp | 55 +- ar_track_alvar/src/Plugin_win.cpp | 56 +- ar_track_alvar/src/Pose.cpp | 242 +- ar_track_alvar/src/Ransac.cpp | 280 +- ar_track_alvar/src/Rotation.cpp | 522 ++-- ar_track_alvar/src/SampleCamCalib.cpp | 372 +-- ar_track_alvar/src/SampleCvTestbed.cpp | 267 +- ar_track_alvar/src/SampleFilter.cpp | 459 ++-- ar_track_alvar/src/SampleIntegralImage.cpp | 430 +-- ar_track_alvar/src/SampleLabeling.cpp | 316 ++- ar_track_alvar/src/SampleMarkerCreator.cpp | 701 ++--- ar_track_alvar/src/SampleMarkerDetector.cpp | 408 +-- ar_track_alvar/src/SampleMarkerHide.cpp | 438 +-- .../src/SampleMarkerlessCreator.cpp | 100 +- .../src/SampleMarkerlessDetector.cpp | 384 +-- ar_track_alvar/src/SampleMultiMarker.cpp | 419 +-- .../src/SampleMultiMarkerBundle.cpp | 567 ++-- ar_track_alvar/src/SampleOptimization.cpp | 263 +- ar_track_alvar/src/SamplePointcloud.cpp | 541 ++-- ar_track_alvar/src/SampleTrack.cpp | 469 ++-- ar_track_alvar/src/SfM.cpp | 1064 ++++---- ar_track_alvar/src/Threads.cpp | 15 +- ar_track_alvar/src/Threads_unix.cpp | 44 +- ar_track_alvar/src/Threads_win.cpp | 73 +- ar_track_alvar/src/Timer.cpp | 15 +- ar_track_alvar/src/Timer_unix.cpp | 30 +- ar_track_alvar/src/Timer_win.cpp | 78 +- ar_track_alvar/src/TrackerFeatures.cpp | 503 ++-- ar_track_alvar/src/TrackerOrientation.cpp | 422 +-- ar_track_alvar/src/TrackerPsa.cpp | 438 +-- ar_track_alvar/src/TrackerStat.cpp | 160 +- ar_track_alvar/src/TrifocalTensor.cpp | 223 +- ar_track_alvar/src/UnscentedKalman.cpp | 291 +- ar_track_alvar/src/Util.cpp | 1078 ++++---- ar_track_alvar/src/kinect_filtering.cpp | 401 ++- ar_track_alvar/src/medianFilter.cpp | 103 +- .../capture_plugin_cmu/CapturePluginCmu.cpp | 441 +-- .../capture_plugin_cmu/CapturePluginCmu.h | 95 +- 
.../CapturePluginDSCapture.cpp | 255 +- .../CapturePluginDSCapture.h | 142 +- .../capture_plugin_file/CapturePluginFile.cpp | 116 +- .../capture_plugin_file/CapturePluginFile.h | 95 +- .../CapturePluginHighgui.cpp | 175 +- .../CapturePluginHighgui.h | 98 +- .../CapturePluginPtgrey.cpp | 319 ++- .../CapturePluginPtgrey.h | 108 +- ar_track_alvar/test/test_ar_legacy.py | 6 +- .../test_ar_track_alvar_individual_markers.py | 2 +- 146 files changed, 20490 insertions(+), 17118 deletions(-) mode change 100755 => 100644 ar_track_alvar/include/ar_track_alvar/DirectoryIterator.h mode change 100755 => 100644 ar_track_alvar/include/ar_track_alvar/DirectoryIterator_private.h mode change 100755 => 100644 ar_track_alvar/include/ar_track_alvar/Shared.h mode change 100755 => 100644 ar_track_alvar/src/DirectoryIterator.cpp mode change 100755 => 100644 ar_track_alvar/src/DirectoryIterator_unix.cpp mode change 100755 => 100644 ar_track_alvar/src/DirectoryIterator_win.cpp diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 4d8ca54..67990fe 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -36,7 +36,7 @@ find_package(visualization_msgs REQUIRED) find_package(PCL REQUIRED QUIET COMPONENTS common io) find_package(Eigen3 REQUIRED) -find_package(TinyXML2 REQUIRED) +find_package(tinyxml_vendor) include_directories(include) @@ -52,7 +52,7 @@ set(dependencies pcl_ros pcl_conversions std_msgs - TinyXML2 + tinyxml_vendor image_transport perception_pcl visualization_msgs @@ -67,45 +67,43 @@ set(dependencies include_directories(include ${OpenCV_INCLUDE_DIRS} - ${TinyXML2_INCLUDE_DIRS} + ${TinyXML_INCLUDE_DIRS} ${PCL_COMMON_INCLUDE_DIRS} ) -add_library(ar_track_alvar - src/Camera.cpp - src/CaptureDevice.cpp - src/Pose.cpp - src/Marker.cpp - src/MarkerDetector.cpp +add_library(${PROJECT_NAME} src/Bitset.cpp - src/Rotation.cpp - src/CvTestbed.cpp + src/Camera.cpp src/CaptureDevice.cpp src/CaptureFactory.cpp src/CaptureFactory_unix.cpp - src/FileFormatUtils.cpp - src/Threads.cpp - src/Threads_unix.cpp - src/Mutex.cpp - src/Mutex_unix.cpp src/ConnectedComponents.cpp - src/Line.cpp - src/Plugin.cpp - src/Plugin_unix.cpp + src/CvTestbed.cpp src/DirectoryIterator.cpp src/DirectoryIterator_unix.cpp src/Draw.cpp - src/Util.cpp + src/FileFormatUtils.cpp src/Filter.cpp src/Kalman.cpp - src/kinect_filtering.cpp - src/Optimization.cpp + src/Line.cpp + src/Marker.cpp + src/MarkerDetector.cpp src/MultiMarker.cpp src/MultiMarkerBundle.cpp src/MultiMarkerInitializer.cpp - ) + src/Mutex.cpp + src/Mutex_unix.cpp + src/Optimization.cpp + src/Plugin.cpp + src/Plugin_unix.cpp + src/Pose.cpp + src/Rotation.cpp + src/Threads.cpp + src/Threads_unix.cpp + src/Util.cpp +) -target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBRARIES} ${TinyXML_LIBRARIES}) +target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBRARIES} tinyxml) ament_target_dependencies(ar_track_alvar ${dependencies}) add_library(kinect_filtering src/kinect_filtering.cpp) @@ -113,11 +111,11 @@ ament_target_dependencies(kinect_filtering ${dependencies}) add_library(medianFilter src/medianFilter.cpp) -target_link_libraries(medianFilter ar_track_alvar) +target_link_libraries(medianFilter ar_track_alvar ${TinyXML_LIBRARIES}) ament_target_dependencies(medianFilter ${dependencies}) add_executable(individualMarkers nodes/IndividualMarkers.cpp) -target_link_libraries(individualMarkers ar_track_alvar kinect_filtering) +target_link_libraries(individualMarkers ar_track_alvar kinect_filtering ${TinyXML_LIBRARIES}) 
ament_target_dependencies(individualMarkers ${dependencies}) add_executable(individualMarkersNoKinect nodes/IndividualMarkersNoKinect.cpp) @@ -125,20 +123,20 @@ target_link_libraries(individualMarkersNoKinect ar_track_alvar) ament_target_dependencies(individualMarkersNoKinect ${dependencies}) add_executable(trainMarkerBundle nodes/TrainMarkerBundle.cpp) -target_link_libraries(trainMarkerBundle ar_track_alvar) +target_link_libraries(trainMarkerBundle ar_track_alvar ${TinyXML_LIBRARIES}) ament_target_dependencies(trainMarkerBundle ${dependencies}) add_executable(findMarkerBundles nodes/FindMarkerBundles.cpp) -target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter) +target_link_libraries(findMarkerBundles ar_track_alvar kinect_filtering medianFilter ${TinyXML_LIBRARIES}) ament_target_dependencies(findMarkerBundles ${dependencies}) add_executable(findMarkerBundlesNoKinect nodes/FindMarkerBundlesNoKinect.cpp) -target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar) +target_link_libraries(findMarkerBundlesNoKinect ar_track_alvar ${TinyXML_LIBRARIES}) ament_target_dependencies(findMarkerBundlesNoKinect ${dependencies}) add_executable(createMarker src/SampleMarkerCreator.cpp) -target_link_libraries(createMarker ar_track_alvar) +target_link_libraries(createMarker ar_track_alvar ${TinyXML_LIBRARIES}) ament_target_dependencies(createMarker ${dependencies}) diff --git a/ar_track_alvar/include/ar_track_alvar/Alvar.h b/ar_track_alvar/include/ar_track_alvar/Alvar.h index 3418557..0e5da2c 100644 --- a/ar_track_alvar/include/ar_track_alvar/Alvar.h +++ b/ar_track_alvar/include/ar_track_alvar/Alvar.h @@ -29,44 +29,52 @@ * * \section Introduction * - * ALVAR is a software library for creating virtual and augmented reality (AR) applications. ALVAR has - * been developed by the VTT Technical Research Centre of Finland. ALVAR is released under the terms of - * the GNU Lesser General Public License, version 2.1, or (at your option) any later version. - * - * ALVAR is designed to be as flexible as possible. It offers high-level tools and methods for creating - * augmented reality applications with just a few lines of code. The library also includes interfaces - * for all of the low-level tools and methods, which makes it possible for the user to develop their - * own solutions using alternative approaches or completely new algorithms. - * - * ALVAR is currently provided on Windows and Linux operating systems and only depends on one third - * party library (OpenCV). ALVAR is independent of any graphical libraries and can be easily integrated - * into existing applications. The sample applications use GLUT and the demo applications use OpenSceneGraph. + * ALVAR is a software library for creating virtual and augmented reality (AR) + * applications. ALVAR has been developed by the VTT Technical Research Centre + * of Finland. ALVAR is released under the terms of the GNU Lesser General + * Public License, version 2.1, or (at your option) any later version. + * + * ALVAR is designed to be as flexible as possible. It offers high-level tools + * and methods for creating augmented reality applications with just a few lines + * of code. The library also includes interfaces for all of the low-level tools + * and methods, which makes it possible for the user to develop their own + * solutions using alternative approaches or completely new algorithms. + * + * ALVAR is currently provided on Windows and Linux operating systems and only + * depends on one third party library (OpenCV). 
ALVAR is independent of any + * graphical libraries and can be easily integrated into existing applications. + * The sample applications use GLUT and the demo applications use + * OpenSceneGraph. * * \section Features * - * - Detecting and tracking 2D markers (\e MarkerDetector). Currently two types of square matrix markers - * are supported (\e MarkerData and \e MarkerArtoolkit). Future marker types can easily be added. ALVAR - * keeps the \e Marker \e Pose estimation as accurate as possible. Furthermore, ALVAR uses some tracking - * heuristics to identify markers that are "too far" and to recover from occlusions in the multimarker - * case for example. - * - Using a setup of multiple markers for pose detection (\e MultiMarker). The marker setup coordinates - * can be set manually or they can be automatically deduced using various methods (\e MultiMarkerFiltered - * and \e MultiMarkerBundle). - * - Tools for calibrating \e Camera. Distorting and undistorting points, projecting points and finding - * exterior orientation using point-sets. + * - Detecting and tracking 2D markers (\e MarkerDetector). Currently two types + * of square matrix markers are supported (\e MarkerData and \e + * MarkerArtoolkit). Future marker types can easily be added. ALVAR keeps the \e + * Marker \e Pose estimation as accurate as possible. Furthermore, ALVAR uses + * some tracking heuristics to identify markers that are "too far" and to + * recover from occlusions in the multimarker case for example. + * - Using a setup of multiple markers for pose detection (\e MultiMarker). The + * marker setup coordinates can be set manually or they can be automatically + * deduced using various methods (\e MultiMarkerFiltered and \e + * MultiMarkerBundle). + * - Tools for calibrating \e Camera. Distorting and undistorting points, + * projecting points and finding exterior orientation using point-sets. * - Hiding markers from the view (\e BuildHideTexture and \e DrawTexture). - * - Several basic filters: \e FilterAverage, \e FilterMedian, \e FilterRunningAverage, - * \e FilterDoubleExponentialSmoothing. - * - \e Kalman filters for sensor fusion: \e Kalman Filter, \e Extended Kalman Filter and Unscented Kalman - * Filter (\e KalmanSensor, \e KalmanSensorEkf, \e KalmanEkf, \e UnscentedKalman). - * - Several methods for tracking using optical flow: \e TrackerPsa , \e TrackerPsaRot , \e TrackerFeatures - * and \e TrackerStat. + * - Several basic filters: \e FilterAverage, \e FilterMedian, \e + * FilterRunningAverage, \e FilterDoubleExponentialSmoothing. + * - \e Kalman filters for sensor fusion: \e Kalman Filter, \e Extended Kalman + * Filter and Unscented Kalman Filter (\e KalmanSensor, \e KalmanSensorEkf, \e + * KalmanEkf, \e UnscentedKalman). + * - Several methods for tracking using optical flow: \e TrackerPsa , \e + * TrackerPsaRot , \e TrackerFeatures and \e TrackerStat. * - etc... * * \section Platforms * * ALVAR is officially supported and tested on the following platforms. - * - Windows XP 32-bit, Microsoft Visual Studio 2005 (8.0), 2008 (9.0) and 2010 (10.0) + * - Windows XP 32-bit, Microsoft Visual Studio 2005 (8.0), 2008 (9.0) and 2010 + * (10.0) * - Linux 32-bit, GCC 4.3 and 4.4 * - Linux 64-bit, GCC 4.3 and 4.4 * @@ -96,61 +104,68 @@ * - OpenSceneGraph (http://www.openscenegraph.org) * * \example SampleCamCalib.cpp - * This is an example of how to use \e ProjPoints and \e Camera classes to perform camera calibration - * using a chessboard pattern. 
+ * This is an example of how to use \e ProjPoints and \e Camera classes to + * perform camera calibration using a chessboard pattern. * * \example SampleCvTestbed.cpp - * This is an example of how to use the \e CvTestbed and \e Capture classes in order to make quick OpenCV - * prototype applications. + * This is an example of how to use the \e CvTestbed and \e Capture classes in + * order to make quick OpenCV prototype applications. * * \example SampleFilter.cpp - * This is an example of how to use various filters: \e FilterAverage, \e FilterMedian, - * \e FilterRunningAverage, \e FilterDoubleExponentialSmoothing and \e Kalman. + * This is an example of how to use various filters: \e FilterAverage, \e + * FilterMedian, \e FilterRunningAverage, \e FilterDoubleExponentialSmoothing + * and \e Kalman. * * \example SampleIntegralImage.cpp - * This is an example of how to use the \e IntegralImage and \e IntegralGradient classes for image - * gradient analysis. + * This is an example of how to use the \e IntegralImage and \e IntegralGradient + * classes for image gradient analysis. * * \example SampleLabeling.cpp - * This is an example of how to label images using \e LabelImage and \e MarchEdge. + * This is an example of how to label images using \e LabelImage and \e + * MarchEdge. * * \example SampleMarkerCreator.cpp - * This is an example that demonstrates the generation of \e MarkerData markers and saving the image - * using \e SaveMarkerImage. + * This is an example that demonstrates the generation of \e MarkerData markers + * and saving the image using \e SaveMarkerImage. * * \example SampleMarkerDetector.cpp - * This is an example that shows how to detect \e MarkerData markers and visualize them using\e GlutViewer. + * This is an example that shows how to detect \e MarkerData markers and + * visualize them using\e GlutViewer. * * \example SampleMarkerHide.cpp - * This is an example that shows how to detect \e MarkerData markers, visualize them using \e GlutViewer - * and hide them with \e BuildHideTexture and \e DrawTexture. + * This is an example that shows how to detect \e MarkerData markers, visualize + * them using \e GlutViewer and hide them with \e BuildHideTexture and \e + * DrawTexture. * * \example SampleMarkerlessCreator.cpp - * This is an example that demonstrates the use of FernImageDetector to train a Fern classifier. + * This is an example that demonstrates the use of FernImageDetector to train a + * Fern classifier. * * \example SampleMarkerlessDetector.cpp - * This is an example that demonstrates the use of FernImageDetector to detect an image as a marker. + * This is an example that demonstrates the use of FernImageDetector to detect + * an image as a marker. * * \example SampleMultiMarker.cpp - * This is an example that demonstrates the use of a preconfigured \e MultiMarker setup. + * This is an example that demonstrates the use of a preconfigured \e + * MultiMarker setup. * * \example SampleMultiMarkerBundle.cpp - * This is an example that automatically recognising \e MultiMarker setups using \e MultiMarkerFiltered and - * optimizes it with \e MultiMarkerBundle. + * This is an example that automatically recognising \e MultiMarker setups using + * \e MultiMarkerFiltered and optimizes it with \e MultiMarkerBundle. * * \example SampleOptimization.cpp - * This is an example of how to use the \e Optimization class by fitting curves of increasing degree to - * random data. 
+ * This is an example of how to use the \e Optimization class by fitting curves + * of increasing degree to random data. * * \example SamplePointcloud.cpp - * This is an example showing how to use \e SimpleSfM for tracking the environment using features in - * addition to \e MultiMarker. + * This is an example showing how to use \e SimpleSfM for tracking the + * environment using features in addition to \e MultiMarker. * * \example SampleTrack.cpp - * This is an example that shows how to perform tracking of the optical flow using \e TrackerPsa, - * \e TrackerPsaRot, \e TrackerFeatures or \e TrackerStat. + * This is an example that shows how to perform tracking of the optical flow + * using \e TrackerPsa, \e TrackerPsaRot, \e TrackerFeatures or \e TrackerStat. */ - + /** * \file Alvar.h * @@ -159,20 +174,20 @@ */ #if defined(WIN32) && !defined(ALVAR_STATIC) - #ifdef ALVAR_BUILD - #define ALVAR_EXPORT __declspec(dllexport) - #else - #define ALVAR_EXPORT __declspec(dllimport) - #endif +#ifdef ALVAR_BUILD +#define ALVAR_EXPORT __declspec(dllexport) #else - #define ALVAR_EXPORT +#define ALVAR_EXPORT __declspec(dllimport) +#endif +#else +#define ALVAR_EXPORT #endif /** * \brief Main ALVAR namespace. */ -namespace alvar { - +namespace alvar +{ /** * \brief Major version number. */ @@ -193,35 +208,35 @@ static const int ALVAR_VERSION_PATCH = 0; * * The tag contains alpha, beta and release candidate versions. */ -static const char *ALVAR_VERSION_TAG = ""; +static const char* ALVAR_VERSION_TAG = ""; /** * \brief Revision version string. * * The revision contains an identifier from the source control system. */ -static const char *ALVAR_VERSION_REVISION = ""; +static const char* ALVAR_VERSION_REVISION = ""; /** * \brief Entire version string. */ -static const char *ALVAR_VERSION = "2.0.0"; +static const char* ALVAR_VERSION = "2.0.0"; /** * \brief Entire version string without dots. */ -static const char *ALVAR_VERSION_NODOTS = "200"; +static const char* ALVAR_VERSION_NODOTS = "200"; /** * \brief Date the library was built. */ -static const char *ALVAR_DATE = "2012-06-20"; +static const char* ALVAR_DATE = "2012-06-20"; /** * \brief System the library was built on. */ -static const char *ALVAR_SYSTEM = "Linux 3.2.0-24-generic x86_64"; +static const char* ALVAR_SYSTEM = "Linux 3.2.0-24-generic x86_64"; -} +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/AlvarException.h b/ar_track_alvar/include/ar_track_alvar/AlvarException.h index 21b3c1a..99c971d 100644 --- a/ar_track_alvar/include/ar_track_alvar/AlvarException.h +++ b/ar_track_alvar/include/ar_track_alvar/AlvarException.h @@ -32,17 +32,19 @@ #include -namespace alvar { - +namespace alvar +{ /** * \brief ALVAR exception class. */ class AlvarException : public std::runtime_error { public: - AlvarException(const char *s) : std::runtime_error(s) { } + AlvarException(const char* s) : std::runtime_error(s) + { + } }; -} +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Bitset.h b/ar_track_alvar/include/ar_track_alvar/Bitset.h index 5cc9df8..35ba5d8 100644 --- a/ar_track_alvar/include/ar_track_alvar/Bitset.h +++ b/ar_track_alvar/include/ar_track_alvar/Bitset.h @@ -37,14 +37,16 @@ #include #include -namespace alvar { - +namespace alvar +{ /** * \brief \e Bitset is a basic class for handling bit sequences * - * The bits are stored internally using deque (See http://osdir.com/ml/gis.geos.devel/2006-04/msg00142.html). - * The bitset is assumed to have most significant bits left i.e. 
the push_back() methods add to the least - * significant end of the bit sequence. The usage is clarified by the following example. + * The bits are stored internally using deque (See + * http://osdir.com/ml/gis.geos.devel/2006-04/msg00142.html). The bitset is + * assumed to have most significant bits left i.e. the push_back() methods add + * to the least significant end of the bit sequence. The usage is clarified by + * the following example. * * \section Usage * \code @@ -59,101 +61,110 @@ namespace alvar { * b.Output(std::cout); // b contains now: 00000000 00000000 10001000 10001000 * \endcode */ -class ALVAR_EXPORT Bitset { +class ALVAR_EXPORT Bitset +{ protected: - std::deque bits; - + std::deque bits; + public: - /** \brief The length of the \e Bitset */ - int Length(); - /** \brief Output the bits to selected ostream - * \param os The output stream to be used for outputting e.g. std::cout - */ - std::ostream &Output(std::ostream &os) const; - /** \brief Clear the bits */ - void clear(); - /** \brief Push back one bit - * \param bit Boolean (true/false) to be pushed to the end of bit sequence. - */ - void push_back(const bool bit); - /** \brief Push back \e bit_count bits from 'byte' \e b - * \param b Unsigned character (8-bits) to be pushed to the end of bit sequence. - * \param bit_count Number of bits to be pushed (default/max is 8 bits) - */ - void push_back(const unsigned char b, const int bit_count=8); - /** \brief Push back \e bit_count bits from 'short' \e s - * \param s Unsigned short (16-bits) to be pushed to the end of bit sequence. - * \param bit_count Number of bits to be pushed (default/max is 16 bits) - */ - void push_back(const unsigned short s, const int bit_count=16); - /** \brief Push back \e bit_count bits from 'long' \e l - * \param l Unsigned long (32-bits) to be pushed to the end of bit sequence. - * \param bit_count Number of bits to be pushed (default/max is 32 bits) - */ - void push_back(const unsigned long l, const int bit_count=32); - /** \brief Push back meaningful bits from 'long' \e l - * \param l The meaningful bits of the given unsigned long (32-bits) are pushed to the end of bit sequence. - */ - void push_back_meaningful(const unsigned long l); - /** \brief Fill the \e Bitset with non-meaningful zeros - * \param bit_count Non-meaningful zeros are added until this given \e bit_count is reached. - */ - void fill_zeros_left(const size_t bit_count); - /** \brief Push back a string of characters - * \param s String of characters to be pushed to the end of bit sequence. - */ - void push_back(std::string s); - /** \brief Pop the front bit */ - bool pop_front(); - /** \brief Pop the back bit */ - bool pop_back(); - /** \brief Flip the selected bit - * \param pos The bit in this given position is flipped. - */ - void flip(size_t pos); - /** \brief The \e Bitset as a hex string */ - std::string hex(); - /** \brief The \e Bitset as 'unsigned long' */ - unsigned long ulong(); - /** \brief The \e Bitset as 'unsigned char' */ - unsigned char uchar(); - /** \brief The \e Bitset as 'deque' */ - inline std::deque& GetBits() - { - return bits; - } + /** \brief The length of the \e Bitset */ + int Length(); + /** \brief Output the bits to selected ostream + * \param os The output stream to be used for outputting e.g. std::cout + */ + std::ostream& Output(std::ostream& os) const; + /** \brief Clear the bits */ + void clear(); + /** \brief Push back one bit + * \param bit Boolean (true/false) to be pushed to the end of bit sequence. 
+ */ + void push_back(const bool bit); + /** \brief Push back \e bit_count bits from 'byte' \e b + * \param b Unsigned character (8-bits) to be pushed to the end of bit + * sequence. \param bit_count Number of bits to be pushed (default/max is 8 + * bits) + */ + void push_back(const unsigned char b, const int bit_count = 8); + /** \brief Push back \e bit_count bits from 'short' \e s + * \param s Unsigned short (16-bits) to be pushed to the end of bit sequence. + * \param bit_count Number of bits to be pushed (default/max is 16 bits) + */ + void push_back(const unsigned short s, const int bit_count = 16); + /** \brief Push back \e bit_count bits from 'long' \e l + * \param l Unsigned long (32-bits) to be pushed to the end of bit sequence. + * \param bit_count Number of bits to be pushed (default/max is 32 bits) + */ + void push_back(const unsigned long l, const int bit_count = 32); + /** \brief Push back meaningful bits from 'long' \e l + * \param l The meaningful bits of the given unsigned long (32-bits) are + * pushed to the end of bit sequence. + */ + void push_back_meaningful(const unsigned long l); + /** \brief Fill the \e Bitset with non-meaningful zeros + * \param bit_count Non-meaningful zeros are added until this given \e + * bit_count is reached. + */ + void fill_zeros_left(const size_t bit_count); + /** \brief Push back a string of characters + * \param s String of characters to be pushed to the end of bit sequence. + */ + void push_back(std::string s); + /** \brief Pop the front bit */ + bool pop_front(); + /** \brief Pop the back bit */ + bool pop_back(); + /** \brief Flip the selected bit + * \param pos The bit in this given position is flipped. + */ + void flip(size_t pos); + /** \brief The \e Bitset as a hex string */ + std::string hex(); + /** \brief The \e Bitset as 'unsigned long' */ + unsigned long ulong(); + /** \brief The \e Bitset as 'unsigned char' */ + unsigned char uchar(); + /** \brief The \e Bitset as 'deque' */ + inline std::deque& GetBits() + { + return bits; + } }; /** - * \brief An extended \e Bitset ( \e BitsetExt ) for handling e.g. Hamming encoding/decoding + * \brief An extended \e Bitset ( \e BitsetExt ) for handling e.g. Hamming + * encoding/decoding * - * This class is based on the basic \e Bitset. It provides additional features for Hamming coding - * (See http://en.wikipedia.org/wiki/Hamming_code). + * This class is based on the basic \e Bitset. It provides additional features + * for Hamming coding (See http://en.wikipedia.org/wiki/Hamming_code). 
* * The \e BitsetExt is used e.g by \e MarkerData */ -class ALVAR_EXPORT BitsetExt : public Bitset { +class ALVAR_EXPORT BitsetExt : public Bitset +{ protected: - bool verbose; - void hamming_enc_block(unsigned long block_len, std::deque::iterator &iter); - int hamming_dec_block(unsigned long block_len, std::deque::iterator &iter); + bool verbose; + void hamming_enc_block(unsigned long block_len, + std::deque::iterator& iter); + int hamming_dec_block(unsigned long block_len, + std::deque::iterator& iter); + public: - /** \brief Constructor */ - BitsetExt(); - /** \brief Constructor */ - BitsetExt(bool _verbose); - /** \brief Set the verbose/silent mode */ - void SetVerbose(bool _verbose); - /** \brief Count how many bits will be in the Bitset after hamming encoding */ - static int count_hamming_enc_len(int block_len, int dec_len); - /** \brief Count how many bits will be in the Bitset after hamming decoding */ - static int count_hamming_dec_len(int block_len, int enc_len); - /** \brief Hamming encoding 'in-place' using the defined block length */ - void hamming_enc(int block_len); - /** \brief Hamming decoding 'in-place' using the defined block length */ - int hamming_dec(int block_len); + /** \brief Constructor */ + BitsetExt(); + /** \brief Constructor */ + BitsetExt(bool _verbose); + /** \brief Set the verbose/silent mode */ + void SetVerbose(bool _verbose); + /** \brief Count how many bits will be in the Bitset after hamming encoding */ + static int count_hamming_enc_len(int block_len, int dec_len); + /** \brief Count how many bits will be in the Bitset after hamming decoding */ + static int count_hamming_dec_len(int block_len, int enc_len); + /** \brief Hamming encoding 'in-place' using the defined block length */ + void hamming_enc(int block_len); + /** \brief Hamming decoding 'in-place' using the defined block length */ + int hamming_dec(int block_len); }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Camera.h b/ar_track_alvar/include/ar_track_alvar/Camera.h index bc9003a..89420e4 100644 --- a/ar_track_alvar/include/ar_track_alvar/Camera.h +++ b/ar_track_alvar/include/ar_track_alvar/Camera.h @@ -43,251 +43,306 @@ #include #include #include -#include "opencv2/core/types_c.h" -#include -#include -#include -#include "opencv2/imgproc/types_c.h" -#include "opencv2/opencv.hpp" - -namespace alvar { - -/** \brief Simple structure for collecting 2D and 3D points e.g. for camera calibration */ -struct ALVAR_EXPORT ProjPoints { - int width; - int height; - - /** \brief 3D object points corresponding with the detected 2D image points. */ - std::vector object_points; - /** \brief Detected 2D object points +namespace alvar +{ +/** \brief Simple structure for collecting 2D and 3D points e.g. for camera + * calibration */ +struct ALVAR_EXPORT ProjPoints +{ + int width; + int height; + + /** \brief 3D object points corresponding with the detected 2D image points. + */ + std::vector object_points; + /** \brief Detected 2D object points + * If point_counts[0] == 10, then the * If point_counts[0] == 10, then the + * If point_counts[0] == 10, then the + * If point_counts[0] == 10, then the + * If point_counts[0] == 10, then the + * first 10 points are detected in the first frame. If + * first 10 points are detected in the first frame. If + * first 10 points are detected in the first frame. If * first 10 points are detected in the first frame. If + * first 10 points are detected in the first frame. 
If + * point_counts[1] == 6, then the next 6 of these points are * point_counts[1] == 6, then the next 6 of these points are - * detected in the next frame... etc. - */ - std::vector image_points; - /** \brief Vector indicating how many points are detected for each frame */ - std::vector point_counts; - - /** \brief Reset \e object_points , \e image_points and \e point_counts */ - void Reset(); - - /** \brief Add elements to \e object_points , \e image_points and \e point_counts using Chessboard pattern */ - bool AddPointsUsingChessboard(cv::Mat* image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize); - - /** \brief Add elements to \e object_points , \e image_points and \e point_counts using detected markers */ - bool AddPointsUsingMarkers(std::vector &marker_corners, std::vector &marker_corners_img, cv::Mat* image); + * point_counts[1] == 6, then the next 6 of these points are + * point_counts[1] == 6, then the next 6 of these points are + * point_counts[1] == 6, then the next 6 of these points are + * detected in the next frame... etc. + */ + std::vector image_points; + /** \brief Vector indicating how many points are detected for each frame */ + std::vector point_counts; + + /** \brief Reset \e object_points , \e image_points and \e point_counts */ + void Reset(); + + /** \brief Add elements to \e object_points , \e image_points and \e + * point_counts using Chessboard pattern */ + bool AddPointsUsingChessboard(const cv::Mat& image, double etalon_square_size, + int etalon_rows, int etalon_columns, + bool visualize); + + /** \brief Add elements to \e object_points , \e image_points and \e + * point_counts using detected markers */ + bool AddPointsUsingMarkers(std::vector& marker_corners, + std::vector& marker_corners_img, + cv::Mat& image); }; - /** - * \brief Simple \e Camera class for calculating distortions, orientation or projections with pre-calibrated camera + * \brief Simple \e Camera class for calculating distortions, orientation or + * projections with pre-calibrated camera */ -class ALVAR_EXPORT Camera { //: public rclcpp::Node { - +class ALVAR_EXPORT Camera +{ public: - - CvMat calib_K; double calib_K_data[3][3]; - CvMat calib_D; double calib_D_data[4]; - int calib_x_res; - int calib_y_res; - int x_res; - int y_res; - bool getCamInfo_; + cv::Mat calib_K; + double calib_K_data[3][3]; + cv::Mat calib_D; + double calib_D_data[4]; + int calib_x_res; + int calib_y_res; + int x_res; + int y_res; + bool getCamInfo_; protected: - std::string cameraInfoTopic_; + std::string cameraInfoTopic_; sensor_msgs::msg::CameraInfo cam_info_; // void camInfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info); // rclcpp::Subscription::SharedPtr sub_; private: - bool LoadCalibXML(const char *calibfile); - bool LoadCalibOpenCV(const char *calibfile); - bool SaveCalibXML(const char *calibfile); - bool SaveCalibOpenCV(const char *calibfile); + bool LoadCalibXML(const char* calibfile); + bool LoadCalibOpenCV(const char* calibfile); + bool SaveCalibXML(const char* calibfile); + bool SaveCalibOpenCV(const char* calibfile); public: - /** \brief One of the two methods to make this class serializable by \e Serialization class */ - std::string SerializeId() { return "camera"; }; - /** \brief One of the two methods to make this class serializable by \e Serialization class - * + /** \brief One of the two methods to make this class serializable by \e + * Serialization class */ + std::string SerializeId() + { + return "camera"; + }; + /** \brief One of the two methods 
to make this class serializable by \e + * Serialization class + * + * You can serialize the \e Camera class using filename or any std::iostream + * You can serialize the \e Camera class using filename or any std::iostream + * You can serialize the \e Camera class using filename or any std::iostream * You can serialize the \e Camera class using filename or any std::iostream - * as follows: - * \code - * alvar::Camera cam; - * cam.SetCalib("calib.xml", 320, 240); - * Serialization sero("calib1.xml"); - * sero<>cam; - * cam.SetRes(320, 240); - * \endcode - * \code - * std::stringstream ss; - * Serialization sero(ss); - * sero<>cam; - * \endcode - */ - bool Serialize(Serialization *ser) { - if (!ser->Serialize(calib_x_res, "width")) return false; - if (!ser->Serialize(calib_y_res, "height")) return false; - if (!ser->Serialize(calib_K, "intrinsic_matrix")) return false; - if (!ser->Serialize(calib_D, "distortion")) return false; - return true; - } - - /** \brief Constructor */ - Camera(); - Camera(std::string cam_info_topic); - - /** Sets the intrinsic calibration */ - void SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info); - - /** \brief Get x-direction FOV in radians */ - double GetFovX() { + * You can serialize the \e Camera class using filename or any std::iostream + * as follows: + * \code + * alvar::Camera cam; + * cam.SetCalib("calib.xml", 320, 240); + * Serialization sero("calib1.xml"); + * sero<>cam; + * cam.SetRes(320, 240); + * \endcode + * \code + * std::stringstream ss; + * Serialization sero(ss); + * sero<>cam; + * \endcode + */ + bool Serialize(Serialization* ser) + { + if (!ser->Serialize(calib_x_res, "width")) + return false; + if (!ser->Serialize(calib_y_res, "height")) + return false; + if (!ser->Serialize(calib_K, "intrinsic_matrix")) + return false; + if (!ser->Serialize(calib_D, "distortion")) + return false; + return true; + } + + /** \brief Constructor */ + Camera(); + // Camera(ros::NodeHandle& n, std::string cam_info_topic); + + /** Sets the intrinsic calibration */ + void SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info); + + + /** \brief Get x-direction FOV in radians */ + double GetFovX() + { + return (2.0f * atan2(double(x_res) / 2.0f, (double)calib_K_data[0][0])); return (2.0f * atan2(double(x_res) / 2.0f, (double)calib_K_data[0][0])); - } - /** \brief Get y-direction FOV in radians */ - double GetFovY() { - return (2.0f * atan2(double(y_res) / 2.0f, (double)calib_K_data[1][1])); - } - - void SetSimpleCalib(int _x_res, int _y_res, double f_fac=1.); - - /** \brief Set the calibration file and the current resolution for which the calibration is adjusted to - * \param calibfile File to load. - * \param _x_res Width of images captured from the real camera. - * \param _y_res Height of images captured from the real camera. - * \param format FILE_FORMAT_OPENCV (default) or FILE_FORMAT_XML (see doc/Camera.xsd). - */ - bool SetCalib(const char *calibfile, int _x_res, int _y_res, - FILE_FORMAT format = FILE_FORMAT_DEFAULT); - - /** \brief Save the current calibration information to a file - * \param calibfile File to save. - * \param format FILE_FORMAT_OPENCV (default) or FILE_FORMAT_XML (see doc/Camera.xsd). 
-  }
-  /** \brief Get y-direction FOV in radians */
-  double GetFovY() {
-    return (2.0f * atan2(double(y_res) / 2.0f, (double)calib_K_data[1][1]));
-  }
-
-  void SetSimpleCalib(int _x_res, int _y_res, double f_fac=1.);
-
-  /** \brief Set the calibration file and the current resolution for which the calibration is adjusted to
-   * \param calibfile File to load.
-   * \param _x_res Width of images captured from the real camera.
-   * \param _y_res Height of images captured from the real camera.
-   * \param format FILE_FORMAT_OPENCV (default) or FILE_FORMAT_XML (see doc/Camera.xsd).
-   */
-  bool SetCalib(const char *calibfile, int _x_res, int _y_res,
-                FILE_FORMAT format = FILE_FORMAT_DEFAULT);
-
-  /** \brief Save the current calibration information to a file
-   * \param calibfile File to save.
-   * \param format FILE_FORMAT_OPENCV (default) or FILE_FORMAT_XML (see doc/Camera.xsd).
-   */
-  bool SaveCalib(const char *calibfile, FILE_FORMAT format = FILE_FORMAT_DEFAULT);
-
-  /** \brief Calibrate using the collected \e ProjPoints */
-  void Calibrate(ProjPoints &pp);
-
-  /** \brief If we have no calibration file we can still adjust the default calibration to current resolution */
-  void SetRes(int _x_res, int _y_res);
-
+  /** \brief Get y-direction FOV in radians */
+  double GetFovY()
+  {
+    return (2.0f * atan2(double(y_res) / 2.0f, (double)calib_K_data[1][1]));
+  }
+
+  void SetSimpleCalib(int _x_res, int _y_res, double f_fac = 1.);
+
+  /** \brief Set the calibration file and the current resolution for which the
+   * calibration is adjusted to
+   * \param calibfile File to load.
+   * \param _x_res Width of images captured from the real camera.
+   * \param _y_res Height of images captured from the real camera.
+   * \param format FILE_FORMAT_OPENCV (default) or FILE_FORMAT_XML (see
+   * doc/Camera.xsd).
+   */
+  bool SetCalib(const char* calibfile, int _x_res, int _y_res,
+                FILE_FORMAT format = FILE_FORMAT_DEFAULT);
+
+  /** \brief Save the current calibration information to a file
+   * \param calibfile File to save.
+   * \param format FILE_FORMAT_OPENCV (default) or FILE_FORMAT_XML (see
+   * doc/Camera.xsd).
+   */
+  bool SaveCalib(const char* calibfile,
+                 FILE_FORMAT format = FILE_FORMAT_DEFAULT);
+
+  /** \brief Calibrate using the collected \e ProjPoints */
+  void Calibrate(ProjPoints& pp);
+
+  /** \brief If we have no calibration file we can still adjust the default
+   * calibration to current resolution */
+  void SetRes(int _x_res, int _y_res);
+
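Read together, SetCalib() and SetRes() give a simple fallback pattern (editor's sketch; the file name and resolution are placeholders): SetCalib() returns false when the file cannot be loaded, in which case the built-in default calibration can still be scaled to the capture resolution:

\code
alvar::Camera cam;
if (!cam.SetCalib("calib.xml", 640, 480)) {
  cam.SetRes(640, 480);  // no calibration file: adjust default calibration
}
\endcode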
-  /** \brief Get OpenGL matrix
-   * Generates the OpenGL projection matrix based on OpenCV intrinsic camera matrix K.
-   * \code
-   * 2*K[0][0]/width  2*K[0][1]/width   -(2*K[0][2]/width+1)  0
-   * 0                2*K[1][1]/height  2*K[1][2]/height-1    0
-   * 0                0                 -(f+n)/(f-n)          -2*f*n/(f-n)
-   * 0                0                 -1                    0
-   * \endcode
-   *
-   * Note, that the sign of the element [2][0] is changed. It should be
-   * \code
-   * 2*K[0][2]/width+1
-   * \endcode
-   *
-   * The sign change is due to the fact that with OpenCV and OpenGL projection
-   * matrices both y and z should be mirrored. With other matrix elements
-   * the sign changes eliminate each other, but with principal point
-   * in x-direction we need to make the change by hand.
-   */
-  void GetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height, const float far_clip = 1000.0f, const float near_clip = 0.1f);
-
-  /** \brief Invert operation for \e GetOpenglProjectionMatrix */
-  void SetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height);
-
-  /** \brief Unapplys the lens distortion for points on image plane. */
-  void Undistort(std::vector<PointDouble>& points);
-
-  /** \brief Unapplys the lens distortion for one point on an image plane. */
-  void Undistort(PointDouble &point);
-
-  /** \brief Unapplys the lens distortion for one point on an image plane. */
-  void Undistort(cv::Point2f& point);
-
-  /** \brief Applys the lens distortion for one point on an image plane. */
-  void Distort(cv::Point3f& point);
-
-  /** \brief Applys the lens distortion for points on image plane. */
-  void Distort(std::vector<PointDouble>& points);
-
-  /** \brief Applys the lens distortion for points on image plane. */
-  void Distort(PointDouble &point);
-
-  /** \brief Calculate exterior orientation */
-  void CalcExteriorOrientation(std::vector<CvPoint3D64f>& pw, std::vector<CvPoint2D64f>& pi, Pose *pose);
-
-  /** \brief Calculate exterior orientation
-   */
-  void CalcExteriorOrientation(std::vector<CvPoint3D64f>& pw, std::vector<CvPoint2D64f>& pi,
-                               CvMat *rodriques, CvMat *tra);
-
-  /** \brief Calculate exterior orientation
-   */
-  void CalcExteriorOrientation(std::vector<PointDouble>& pw, std::vector<PointDouble>& pi,
-                               CvMat *rodriques, CvMat *tra);
-
-  /** \brief Calculate exterior orientation
-   */
-  void CalcExteriorOrientation(std::vector<PointDouble>& pw, std::vector<PointDouble>& pi, Pose *pose);
-
-  /** \brief Update existing pose based on new observations. Use (CV_32FC3 and CV_32FC2) for matrices. */
-  bool CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, Pose *pose);
-
-  /** \brief Update existing pose (in rodriques&tra) based on new observations. Use (CV_32FC3 and CV_32FC2) for matrices. */
-  bool CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, CvMat *rodriques, CvMat *tra);
-
-  /** \brief Project one point */
-  void ProjectPoint(const cv::Point3f pw, const Pose *pose, cv::Point2f &pi) const;
-
-  // /** \brief Project one point */
-  // void ProjectPoint(const cv::Point3f pw, const Pose *pose, cv::Point2f &pi) const;
-
-  /** \brief Project points */
-  void ProjectPoints(std::vector<CvPoint3D64f>& pw, Pose *pose, std::vector<CvPoint2D64f>& pi) const;
-
-  /** \brief Project points
-   */
-  void ProjectPoints(const CvMat* object_points, const CvMat* rotation_vector,
-                     const CvMat* translation_vector, CvMat* image_points) const;
-
-  /** \brief Project points
-   */
-  void ProjectPoints(const CvMat* object_points, double gl[16], CvMat* image_points) const;
-
-  /** \brief Project points */
-  void ProjectPoints(const CvMat* object_points, const Pose* pose, CvMat* image_points) const;
+  /** \brief Get OpenGL matrix
+   * Generates the OpenGL projection matrix based on OpenCV intrinsic camera
+   * matrix K.
+   * \code
+   * 2*K[0][0]/width  2*K[0][1]/width   -(2*K[0][2]/width+1)  0
+   * 0                2*K[1][1]/height  2*K[1][2]/height-1    0
+   * 0                0                 -(f+n)/(f-n)          -2*f*n/(f-n)
+   * 0                0                 -1                    0
+   * \endcode
+   *
+   * Note, that the sign of the element [2][0] is changed. It should be
+   * \code
+   * 2*K[0][2]/width+1
+   * \endcode
+   *
+   * The sign change is due to the fact that with OpenCV and OpenGL projection
+   * matrices both y and z should be mirrored. With other matrix elements
+   * the sign changes eliminate each other, but with principal point
+   * in x-direction we need to make the change by hand.
+   */
+  void GetOpenglProjectionMatrix(double proj_matrix[16], const int width,
+                                 const int height,
+                                 const float far_clip = 1000.0f,
+                                 const float near_clip = 0.1f);
+
+  /** \brief Invert operation for \e GetOpenglProjectionMatrix */
+  void SetOpenglProjectionMatrix(double proj_matrix[16], const int width,
+                                 const int height);
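A hedged sketch of consuming the matrix described above with legacy fixed-function OpenGL (assumes a current GL context and a calibrated `cam` of type alvar::Camera; not part of the patch):

\code
#include <GL/gl.h>

double proj[16];
cam.GetOpenglProjectionMatrix(proj, 640, 480);  // default clip range 0.1..1000
glMatrixMode(GL_PROJECTION);
glLoadMatrixd(proj);  // the array is in the order glLoadMatrixd() expects
\endcode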
+
+  /** \brief Unapplies the lens distortion for points on image plane. */
+  void Undistort(std::vector<PointDouble>& points);
+
+  /** \brief Unapplies the lens distortion for one point on an image plane. */
+  void Undistort(PointDouble& point);
+
+  /** \brief Unapplies the lens distortion for one point on an image plane. */
+  void Undistort(cv::Point2f& point);
+
+  /** \brief Applies the lens distortion for one point on an image plane. */
+  void Distort(cv::Point2f& point);
+
+  /** \brief Applies the lens distortion for points on image plane. */
+  void Distort(std::vector<PointDouble>& points);
+
+  /** \brief Applies the lens distortion for one point on an image plane. */
+  void Distort(PointDouble& point);
+
+  /** \brief Calculate exterior orientation
+   */
+  void CalcExteriorOrientation(const std::vector<cv::Point3d>& pw,
+                               const std::vector<cv::Point2d>& pi,
+                               cv::Mat& rodriques, cv::Mat& tra) const;
+
+  /** \brief Calculate exterior orientation
+   */
+  void CalcExteriorOrientation(const std::vector<cv::Point3f>& pw,
+                               const std::vector<cv::Point2f>& pi,
+                               cv::Mat& rodriques, cv::Mat& tra) const;
+
+  /** \brief Calculate exterior orientation
+   */
+  void CalcExteriorOrientation(const std::vector<PointDouble>& pw,
+                               const std::vector<PointDouble>& pi,
+                               Pose* pose) const;
+
+  /** \brief Project one point */
+  void ProjectPoint(const cv::Point3d& pw, const Pose* pose,
+                    cv::Point2d& pi) const;
+
+  /** \brief Project one point */
+  void ProjectPoint(const cv::Point3f& pw, const Pose* pose,
+                    cv::Point2f& pi) const;
+
+  /** \brief Project points */
+  void ProjectPoints(std::vector<cv::Point3d>& pw, Pose* pose,
+                     std::vector<cv::Point2d>& pi) const;
+
+  /** \brief Project points
+   */
+  void ProjectPoints(const cv::Mat& object_points,
+                     const cv::Mat& rotation_vector,
+                     const cv::Mat& translation_vector,
+                     cv::Mat& image_points) const;
+
+  /** \brief Project points
+   */
+  void ProjectPoints(const cv::Mat& object_points, double gl[16],
+                     cv::Mat& image_points) const;
+
+  /** \brief Project points */
+  void ProjectPoints(const cv::Mat& object_points, const Pose* pose,
+                     cv::Mat& image_points) const;
 };

 /**
- * \brief Simple \e Homography class for finding and projecting points between two planes.
+ * \brief Simple \e Homography class for finding and projecting points between
+ * two planes.
 */
-struct ALVAR_EXPORT Homography {
-  double H_data[3][3];
-  CvMat H;
-
-  /** \brief Constructor */
-  Homography();
-
-  /** \brief Find Homography for two point-sets */
-  void Find(const std::vector<PointDouble>& pw, const std::vector<PointDouble>& pi);
-
-  /** \brief Project points using the Homography */
-  void ProjectPoints(const std::vector<PointDouble>& from, std::vector<PointDouble>& to);
+struct ALVAR_EXPORT Homography
+{
+  cv::Mat H;
+
+  /** \brief Constructor */
+  Homography();
+
+  /** \brief Find Homography for two point-sets */
+  void Find(const std::vector<PointDouble>& pw,
+            const std::vector<PointDouble>& pi);
+
+  /** \brief Project points using the Homography */
+  void ProjectPoints(const std::vector<PointDouble>& from,
+                     std::vector<PointDouble>& to) const;
 };

-} // namespace alvar
+}  // namespace alvar

 #endif
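A minimal sketch of the two-call Homography workflow just declared (editor's illustration; `plane_points` and `image_points` are assumed, pre-filled PointDouble vectors of matching correspondences):

\code
alvar::Homography homography;
homography.Find(plane_points, image_points);        // fit H to the point pairs
std::vector<alvar::PointDouble> projected;
homography.ProjectPoints(plane_points, projected);  // map plane -> image
\endcode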
diff --git a/ar_track_alvar/include/ar_track_alvar/Capture.h b/ar_track_alvar/include/ar_track_alvar/Capture.h
index b6f3f90..005a659 100644
--- a/ar_track_alvar/include/ar_track_alvar/Capture.h
+++ b/ar_track_alvar/include/ar_track_alvar/Capture.h
@@ -34,155 +34,179 @@
 #include "CaptureDevice.h"
 #include "Util.h"

-namespace alvar {
-
+namespace alvar
+{
 /**
  * \brief Capture interface that plugins must implement.
  *
- * All plugins must implement the Capture interface. This is the class that implements
- * all of the camera capture funtionality. This class is created by the CapturePlugin
- * implementation.
+ * All plugins must implement the Capture interface. This is the class that
+ * implements all of the camera capture functionality. This class is created by
+ * the CapturePlugin implementation.
 */
 class ALVAR_EXPORT Capture
 {
 public:
-  /**
-   * \brief Constructor.
-   *
-   * \param captureDevice Information of which camera to create.
-   */
-  Capture(const CaptureDevice captureDevice)
-    : mCaptureDevice(captureDevice)
-    , mXResolution(0)
-    , mYResolution(0)
-    , mIsCapturing(false)
+  /**
+   * \brief Constructor.
+   *
+   * \param captureDevice Information of which camera to create.
+   */
+  Capture(const CaptureDevice captureDevice)
+    : mCaptureDevice(captureDevice)
+    , mXResolution(0)
+    , mYResolution(0)
+    , mIsCapturing(false)
+  {
+  }
+
+  /**
+   * \brief Destructor
+   */
+  virtual ~Capture()
+  {
+  }
+
+  /**
+   * \brief The camera information associated to this capture object.
+   */
+  CaptureDevice captureDevice()
+  {
+    return mCaptureDevice;
+  }
+
+  /**
+   * \brief The resolution along the x axis (horizontal).
+   */
+  unsigned long xResolution()
+  {
+    return mXResolution;
+  }
+
+  /**
+   * \brief The resolution along the y axis (vertical).
+   */
+  unsigned long yResolution()
+  {
+    return mYResolution;
+  }
+
+  /**
+   * \brief Test if the camera was properly initialized.
+   */
+  bool isCapturing()
+  {
+    return mIsCapturing;
+  }
+
+  /**
+   * \brief Set the resolution.
+   *
+   * \param xResolution The resolution along the x axis (horizontal).
+   * \param yResolution The resolution along the y axis (vertical).
+   */
+  virtual void setResolution(const unsigned long xResolution,
+                             const unsigned long yResolution)
+  {
+  }
+
+  /**
+   * \brief Starts the camera capture.
+   *
+   * \return True if the camera was properly initialized, false otherwise.
+   */
+  virtual bool start() = 0;
+
+  /**
+   * \brief Stops the camera capture.
+   */
+  virtual void stop() = 0;
+
+  /**
+   * \brief Capture one image from the camera.
+   *
+   * Do not modify this image.
+   *
+   * \return The captured image.
+   */
+  virtual cv::Mat& captureImage() = 0;
+
+  /**
+   * \brief Save camera settings to a file.
+   *
+   * \param filename The filename to write to.
+   * \return True if the settings were successfully saved, false otherwise.
+   */
+  virtual bool saveSettings(std::string filename)
+  {
+    if (!isCapturing()) {
+      return false;
     }

-  /**
-   * \brief Destructor
-   */
-  virtual ~Capture() {}
-
-  /**
-   * \brief The camera information associated to this capture object.
-   */
-  CaptureDevice captureDevice() {return mCaptureDevice;}
-
-  /**
-   * \brief The resolution along the x axis (horizontal).
-   */
-  unsigned long xResolution() {return mXResolution;}
-
-  /**
-   * \brief The resolution along the y axis (vertical).
-   */
-  unsigned long yResolution() {return mYResolution;}
-
-  /**
-   * \brief Test if the camera was properly initialized.
-   */
-  bool isCapturing() {return mIsCapturing;}
-
-  /**
-   * \brief Set the resolution.
-   *
-   * \param xResolution The resolution along the x axis (horizontal).
-   * \param yResolution The resolution along the y axis (vertical).
-   */
-  virtual void setResolution(const unsigned long xResolution, const unsigned long yResolution)
+    Serialization serialization(filename);
+    try
+    {
+      serialization << (*this);
+    }
+    catch (...)
+    {
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * \brief Load camera settings from a file.
+   *
+   * \param filename The filename to read from.
+   * \return True if the settings were successfully loaded, false otherwise.
+   */
+  virtual bool loadSettings(std::string filename)
+  {
+    if (!isCapturing()) {
+      return false;
     }

-  /**
-   * \brief Starts the camera capture.
-   *
-   * \return True if the camera was properly initialized, false otherwise.
-   */
-  virtual bool start() = 0;
-
-  /**
-   * \brief Stops the camera capture.
- */ - virtual void stop() = 0; - - /** - * \brief Capture one image from the camera. - * - * Do not modify this image. - * - * \return The captured image. - */ - virtual IplImage *captureImage() = 0; - - /** - * \brief Save camera settings to a file. - * - * \param filename The filename to write to. - * \return True if the settings were sucessfully saved, false otherwise. - */ - virtual bool saveSettings(std::string filename) { - if (!isCapturing()) { - return false; - } - - Serialization serialization(filename); - try { - serialization << (*this); - } - catch (...) { - return false; - } - return true; - } - - /** - * \brief Load camera settings from a file. - * - * \param filename The filename to read from. - * \return True if the settings were sucessfully loaded, false otherwise. - */ - virtual bool loadSettings(std::string filename) { - if (!isCapturing()) { - return false; - } - - Serialization serialization(filename); - try { - serialization >> (*this); - } - catch (...) { - return false; - } - return true; - } - - /** - * \brief Show the settings dialog of the camera. - * \return True if the settings dialog was shown, false otherwise. - */ - virtual bool showSettingsDialog() = 0; - - /** - * \brief The identification of the class for serialization. - */ - virtual std::string SerializeId() = 0; - - /** - * \brief Performs serialization of the class members and configuration. - * - * \param serialization The Serialization object. - * \return True if the serialization of the class was successful, false otherwise. - */ - virtual bool Serialize(Serialization *serialization) = 0; + Serialization serialization(filename); + try + { + serialization >> (*this); + } + catch (...) + { + return false; + } + return true; + } + + /** + * \brief Show the settings dialog of the camera. + * \return True if the settings dialog was shown, false otherwise. + */ + virtual bool showSettingsDialog() = 0; + + /** + * \brief The identification of the class for serialization. + */ + virtual std::string SerializeId() = 0; + + /** + * \brief Performs serialization of the class members and configuration. + * + * \param serialization The Serialization object. + * \return True if the serialization of the class was successful, false + * otherwise. + */ + virtual bool Serialize(Serialization* serialization) = 0; protected: - CaptureDevice mCaptureDevice; - unsigned long mXResolution; - unsigned long mYResolution; - bool mIsCapturing; + CaptureDevice mCaptureDevice; + unsigned long mXResolution; + unsigned long mYResolution; + bool mIsCapturing; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/CaptureDevice.h b/ar_track_alvar/include/ar_track_alvar/CaptureDevice.h index aae94ba..2beb21b 100644 --- a/ar_track_alvar/include/ar_track_alvar/CaptureDevice.h +++ b/ar_track_alvar/include/ar_track_alvar/CaptureDevice.h @@ -34,56 +34,58 @@ #include -namespace alvar { - +namespace alvar +{ /** * \brief CaptureDevice holder for camera information. * - * CaptureDevice contains the desired backend, the id and description of the camera. + * CaptureDevice contains the desired backend, the id and description of the + * camera. */ class ALVAR_EXPORT CaptureDevice { public: - /** - * \brief Constructor. - * - * \param captureType The type of capture backend. - * \param id The id of the camera. - * \param description A human readable description of the camera. 
- */ - CaptureDevice(const std::string captureType, const std::string id, const std::string description = ""); + /** + * \brief Constructor. + * + * \param captureType The type of capture backend. + * \param id The id of the camera. + * \param description A human readable description of the camera. + */ + CaptureDevice(const std::string captureType, const std::string id, + const std::string description = ""); - /** - * \brief Destructor. - */ - ~CaptureDevice(); + /** + * \brief Destructor. + */ + ~CaptureDevice(); - /** - * \brief The type of capture backend. - */ - std::string captureType() const; + /** + * \brief The type of capture backend. + */ + std::string captureType() const; - /** - * \brief The id of the camera. - */ - std::string id() const; + /** + * \brief The id of the camera. + */ + std::string id() const; - /** - * \brief The description of the camera. - */ - std::string description() const; + /** + * \brief The description of the camera. + */ + std::string description() const; - /** - * \brief A unique name consisting of the capture type and the id. - */ - std::string uniqueName() const; + /** + * \brief A unique name consisting of the capture type and the id. + */ + std::string uniqueName() const; private: - std::string mCaptureType; - std::string mId; - std::string mDescription; + std::string mCaptureType; + std::string mId; + std::string mDescription; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/CaptureFactory.h b/ar_track_alvar/include/ar_track_alvar/CaptureFactory.h index 2e4833a..28e3c49 100644 --- a/ar_track_alvar/include/ar_track_alvar/CaptureFactory.h +++ b/ar_track_alvar/include/ar_track_alvar/CaptureFactory.h @@ -38,101 +38,115 @@ #include "Platform.h" #include "Util.h" -namespace alvar { - +namespace alvar +{ class CaptureFactoryPrivate; /** * \brief CaptureFactory for creating Capture classes. * * CaptureFactory is a singleton that creates Capture classes used to perform - * camera acquisition. Different backends are implemented as dynamicly loaded plugins - * so that platform dependancies can be handled at runtime and not compile time. + * camera acquisition. Different backends are implemented as dynamicly loaded + * plugins so that platform dependancies can be handled at runtime and not + * compile time. */ class ALVAR_EXPORT CaptureFactory { public: - /** - * \brief The singleton instance of CaptureFactory. - */ - static CaptureFactory *instance(); - - /** - * \brief Vector of strings. - */ - typedef std::vector CapturePluginVector; - - /** - * \brief Enumerate capture plugins currently available. - * - * This method should be used carfully since it will load all the available plugins. - * - * \return A vector of strings identifying all currently available capture plugins. - */ - CapturePluginVector enumeratePlugins(); - - /** - * \brief Vector of CaptureDevices. - */ - typedef std::vector CaptureDeviceVector; - - /** - * \brief Enumerate capture devices currently available. - * - * This method should be used carfully since it will load all the known plugins - * and call their respective enumeration methods. The vector of CaptureDevice - * objects returned should be cached. - * - * \param captureType Force the enumeration of only one type of plugin. - * \return A vector of CaptureDevice objects that are currently available. - */ - CaptureDeviceVector enumerateDevices(const std::string &captureType = ""); - - /** - * \brief Create Capture class. Transfers onwership to the caller. 
- * - * If the needed backend plugin is not loaded, an attempt is made to load it and - * an instance of it is kept such that it is available for subsequent calls. - * - * \param captureDevice CaptureDevice object specifying the plugin to use. - * \return A new Capture class for which the caller takes ownership. - */ - Capture *createCapture(const CaptureDevice captureDevice); + /** + * \brief The singleton instance of CaptureFactory. + */ + static CaptureFactory* instance(); + + /** + * \brief Vector of strings. + */ + typedef std::vector CapturePluginVector; + + /** + * \brief Enumerate capture plugins currently available. + * + * This method should be used carfully since it will load all the available + * plugins. + * + * \return A vector of strings identifying all currently available capture + * plugins. + */ + CapturePluginVector enumeratePlugins(); + + /** + * \brief Vector of CaptureDevices. + */ + typedef std::vector CaptureDeviceVector; + + /** + * \brief Enumerate capture devices currently available. + * + * This method should be used carfully since it will load all the known + * plugins and call their respective enumeration methods. The vector of + * CaptureDevice objects returned should be cached. + * + * \param captureType Force the enumeration of only one type of plugin. + * \return A vector of CaptureDevice objects that are currently available. + */ + CaptureDeviceVector enumerateDevices(const std::string& captureType = ""); + + /** + * \brief Create Capture class. Transfers onwership to the caller. + * + * If the needed backend plugin is not loaded, an attempt is made to load it + * and an instance of it is kept such that it is available for subsequent + * calls. + * + * \param captureDevice CaptureDevice object specifying the plugin to use. + * \return A new Capture class for which the caller takes ownership. + */ + Capture* createCapture(const CaptureDevice captureDevice); protected: - /** - * \brief Destructor. - */ - ~CaptureFactory(); + /** + * \brief Destructor. + */ + ~CaptureFactory(); private: - /** - * \brief CaptureFactoryDestroyer for deleting the CaptureFactory singleton. - */ - class CaptureFactoryDestroyer + /** + * \brief CaptureFactoryDestroyer for deleting the CaptureFactory singleton. 
+ */ + class CaptureFactoryDestroyer + { + public: + CaptureFactoryDestroyer(CaptureFactory* instance = NULL) + : mInstance(instance) + { + } + ~CaptureFactoryDestroyer() + { + delete mInstance; + } + void set(CaptureFactory* instance) { - public: - CaptureFactoryDestroyer(CaptureFactory *instance = NULL) : mInstance(instance) {} - ~CaptureFactoryDestroyer() {delete mInstance;} - void set(CaptureFactory *instance) {mInstance = instance;} - private: - CaptureFactory *mInstance; - }; - - // private constructors and assignment operator for singleton implementation - CaptureFactory(); - CaptureFactory(const CaptureFactory&); - CaptureFactory &operator=(const CaptureFactory&); - - // static members for singleton implementation - static CaptureFactory *mInstance; - static Mutex mMutex; - static CaptureFactoryDestroyer mDestroyer; - - // members - CaptureFactoryPrivate *d; + mInstance = instance; + } + + private: + CaptureFactory* mInstance; + }; + + // private constructors and assignment operator for singleton implementation + CaptureFactory(); + CaptureFactory(const CaptureFactory&); + CaptureFactory& operator=(const CaptureFactory&); + + // static members for singleton implementation + static CaptureFactory* mInstance; + static Mutex mMutex; + static CaptureFactoryDestroyer mDestroyer; + + // members + CaptureFactoryPrivate* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/CaptureFactory_private.h b/ar_track_alvar/include/ar_track_alvar/CaptureFactory_private.h index f3506d1..41f1470 100644 --- a/ar_track_alvar/include/ar_track_alvar/CaptureFactory_private.h +++ b/ar_track_alvar/include/ar_track_alvar/CaptureFactory_private.h @@ -30,38 +30,38 @@ #include "Plugin.h" -namespace alvar { - +namespace alvar +{ class CapturePlugin; class CaptureFactoryPrivate { public: - CaptureFactoryPrivate(); - ~CaptureFactoryPrivate(); + CaptureFactoryPrivate(); + ~CaptureFactoryPrivate(); - void setupPluginPaths(); - void parseEnvironmentVariable(const std::string &variable); - std::string pluginPrefix(); - std::string pluginExtension(); + void setupPluginPaths(); + void parseEnvironmentVariable(const std::string& variable); + std::string pluginPrefix(); + std::string pluginExtension(); - void loadPlugins(); - void loadPlugin(const std::string &captureType); - void loadPlugin(const std::string &captureType, const std::string &filename); - CapturePlugin *getPlugin(const std::string &captureType); + void loadPlugins(); + void loadPlugin(const std::string& captureType); + void loadPlugin(const std::string& captureType, const std::string& filename); + CapturePlugin* getPlugin(const std::string& captureType); - typedef std::vector PluginPathsVector; - PluginPathsVector mPluginPaths; - std::string mPluginPrefix; - std::string mPluginPostfix; + typedef std::vector PluginPathsVector; + PluginPathsVector mPluginPaths; + std::string mPluginPrefix; + std::string mPluginPostfix; - bool mLoadedAllPlugins; - typedef std::map PluginMap; - PluginMap mPluginMap; - typedef std::map CapturePluginMap; - CapturePluginMap mCapturePluginMap; + bool mLoadedAllPlugins; + typedef std::map PluginMap; + PluginMap mPluginMap; + typedef std::map CapturePluginMap; + CapturePluginMap mCapturePluginMap; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/CapturePlugin.h b/ar_track_alvar/include/ar_track_alvar/CapturePlugin.h index 3ba0ca3..b8ed826 100644 --- a/ar_track_alvar/include/ar_track_alvar/CapturePlugin.h +++ 
b/ar_track_alvar/include/ar_track_alvar/CapturePlugin.h @@ -33,56 +33,56 @@ #include "Alvar.h" #include "CaptureDevice.h" -namespace alvar { - +namespace alvar +{ /** * \brief CapturePlugin interface that plugins must implement. * - * All plugins must implement the CapturePlugin interface. When the plugin is loaded, - * the CapturePlugin implementation will register itself with the CaptureFactory. + * All plugins must implement the CapturePlugin interface. When the plugin is + * loaded, the CapturePlugin implementation will register itself with the + * CaptureFactory. */ class ALVAR_EXPORT CapturePlugin { public: - /** - * \brief Constructor. - * - * \param captureType A unique identifier for the capture plugin. - */ - CapturePlugin(const std::string &captureType) - : mCaptureType(captureType) - { - } + /** + * \brief Constructor. + * + * \param captureType A unique identifier for the capture plugin. + */ + CapturePlugin(const std::string& captureType) : mCaptureType(captureType) + { + } - /** - * \brief Destructor. - */ - virtual ~CapturePlugin() {}; + /** + * \brief Destructor. + */ + virtual ~CapturePlugin(){}; - /** - * \brief Vector of CaptureDevices. - */ - typedef std::vector CaptureDeviceVector; + /** + * \brief Vector of CaptureDevices. + */ + typedef std::vector CaptureDeviceVector; - /** - * \brief Enumerate capture devices currently available. - * - * \return A vector of CaptureDevice objects that are currently available. - */ - virtual CaptureDeviceVector enumerateDevices() = 0; + /** + * \brief Enumerate capture devices currently available. + * + * \return A vector of CaptureDevice objects that are currently available. + */ + virtual CaptureDeviceVector enumerateDevices() = 0; - /** - * \brief Create Capture class. Transfers onwership to the caller. - * - * \param captureDevice Information of which camera to create. - * \return A new Capture class for which the caller takes ownership. - */ - virtual Capture *createCapture(const CaptureDevice captureDevice) = 0; + /** + * \brief Create Capture class. Transfers onwership to the caller. + * + * \param captureDevice Information of which camera to create. + * \return A new Capture class for which the caller takes ownership. + */ + virtual Capture* createCapture(const CaptureDevice captureDevice) = 0; protected: - std::string mCaptureType; + std::string mCaptureType; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h b/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h index 7063eb6..b60d719 100644 --- a/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h +++ b/ar_track_alvar/include/ar_track_alvar/ConnectedComponents.h @@ -135,4 +135,4 @@ class ALVAR_EXPORT LabelingCvSeq : public Labeling } // namespace alvar -#endif \ No newline at end of file +#endif diff --git a/ar_track_alvar/include/ar_track_alvar/Container3d.h b/ar_track_alvar/include/ar_track_alvar/Container3d.h index 2a24270..72a70d1 100644 --- a/ar_track_alvar/include/ar_track_alvar/Container3d.h +++ b/ar_track_alvar/include/ar_track_alvar/Container3d.h @@ -35,80 +35,107 @@ #include #include -namespace alvar { - -template class Container3d; +namespace alvar +{ +template +class Container3d; -/** \brief Functor class for \e Container3d \e Sort() to sort the search base using distance to specified origin. */ +/** \brief Functor class for \e Container3d \e Sort() to sort the search base + * using distance to specified origin. 
 */
template <class T>
-class Container3dSortDist {
+class Container3dSortDist
+{
 protected:
-  CvPoint3D32f orig;
-  Container3d<T> &container;
+  CvPoint3D32f orig;
+  Container3d<T>& container;
+
 public:
-  Container3dSortDist(Container3d<T> &_container, const CvPoint3D32f _orig) : container(_container), orig(_orig) {}
-  bool operator()(size_t i1, size_t i2)
-  {
-    float x1 = container[i1].first.x-orig.x, x2 = container[i2].first.x-orig.x;
-    float y1 = container[i1].first.y-orig.y, y2 = container[i2].first.y-orig.y;
-    float z1 = container[i1].first.z-orig.z, z2 = container[i2].first.z-orig.z;
-    float d1 = x1*x1 + y1*y1 + z1*z1;
-    float d2 = x2*x2 + y2*y2 + z2*z2;
-    return d1<d2;
-  }
+  Container3dSortDist(Container3d<T>& _container, const CvPoint3D32f _orig)
+    : container(_container), orig(_orig)
+  {
+  }
+  bool operator()(size_t i1, size_t i2)
+  {
+    float x1 = container[i1].first.x - orig.x,
+          x2 = container[i2].first.x - orig.x;
+    float y1 = container[i1].first.y - orig.y,
+          y2 = container[i2].first.y - orig.y;
+    float z1 = container[i1].first.z - orig.z,
+          z2 = container[i2].first.z - orig.z;
+    float d1 = x1 * x1 + y1 * y1 + z1 * z1;
+    float d2 = x2 * x2 + y2 * y2 + z2 * z2;
+    return d1 < d2;
+  }
 };

-/** \brief Functor class for \e Container3d \e Sort() to sort the search base using content size. */
+/** \brief Functor class for \e Container3d \e Sort() to sort the search base
+ * using content size. */
template <class T>
-class Container3dSortSize {
+class Container3dSortSize
+{
 protected:
-  Container3d<T> &container;
+  Container3d<T>& container;
+
 public:
-  Container3dSortSize(Container3d<T> &_container) : container(_container) {}
-  bool operator()(size_t i1, size_t i2)
-  {
-    return (container[i1].second->size() > container[i2].second->size());
-  }
+  Container3dSortSize(Container3d<T>& _container) : container(_container)
+  {
+  }
+  bool operator()(size_t i1, size_t i2)
+  {
+    return (container[i1].second->size() > container[i2].second->size());
+  }
 };

-/** \brief Functor class for \e Container3d \e Limit() to limit the search space with distance */
+/** \brief Functor class for \e Container3d \e Limit() to limit the search space
+ * with distance */
template <class T>
-class Container3dLimitDist {
+class Container3dLimitDist
+{
 protected:
-  Container3d<T> &container;
-  CvPoint3D32f orig;
-  float dist_limit;
+  Container3d<T>& container;
+  CvPoint3D32f orig;
+  float dist_limit;
+
 public:
-  Container3dLimitDist(Container3d<T> &_container, const CvPoint3D32f _orig, float _dist_limit)
-    : container(_container),orig(_orig),dist_limit(_dist_limit) {}
-  bool operator()(size_t i) const {
-    float x = container[i].first.x-orig.x;
-    float y = container[i].first.y-orig.y;
-    float z = container[i].first.z-orig.z;
-    float d = x*x + y*y + z*z;
-    if (d <= dist_limit*dist_limit) return true;
-    return false;
-  }
+  Container3dLimitDist(Container3d<T>& _container, const CvPoint3D32f _orig,
+                       float _dist_limit)
+    : container(_container), orig(_orig), dist_limit(_dist_limit)
+  {
+  }
+  bool operator()(size_t i) const
+  {
+    float x = container[i].first.x - orig.x;
+    float y = container[i].first.y - orig.y;
+    float z = container[i].first.z - orig.z;
+    float d = x * x + y * y + z * z;
+    if (d <= dist_limit * dist_limit)
+      return true;
+    return false;
+  }
 };

 /**
- * \brief Generic container to store any information in 3D (features, photos, ...)
+ * \brief Generic container to store any information in 3D (features, photos,
+ * ...)
 *
- * You can store any information in 3D using this container. Each element in the container has
- * an unique id that it can be referenced with using \e operator[]().
The indices are from 0 - * to \e size(). You can find specific index using \e GetIndex(Iterator &iter) or \e GetIndex(T *p) . + * You can store any information in 3D using this container. Each element in the + * container has an unique id that it can be referenced with using \e + * operator[](). The indices are from 0 to \e size(). You can find specific + * index using \e GetIndex(Iterator &iter) or \e GetIndex(T *p) . * - * In addition the Container3d contains also a 'search space' that can be iterated through - * using \e begin() and \e end(). This 'search space' can be limited using \e Limit() , - * sorted using \e Sort() and reseted using \e ResetSearchSpace(). You specify what to limit/sort - * using specified functors. In ALVAR there exists functors \e Container3dLimitDist , - * \e Container3dSortSize and \e Container3dSortDist . But you can quite well make your own - * versions (see example below). - * - * The implementation is optimized for a situation where there are a lot of searches between every - * time the search space is limited/ordered. This is the reason we use vector containers - * internally. The idea is that later we will improve the class by providing more ways for limiting - * and sorting the search space; for example Frustum culling. + * In addition the Container3d contains also a 'search space' that can be + * iterated through using \e begin() and \e end(). This 'search space' can be + * limited using \e Limit() , sorted using \e Sort() and reseted using \e + * ResetSearchSpace(). You specify what to limit/sort using specified functors. + * In ALVAR there exists functors \e Container3dLimitDist , \e + * Container3dSortSize and \e Container3dSortDist . But you can quite well make + * your own versions (see example below). + * + * The implementation is optimized for a situation where there are a lot of + * searches between every time the search space is limited/ordered. This is the + * reason we use vector containers internally. The idea is that later we will + * improve the class by providing more ways for limiting and sorting the search + * space; for example Frustum culling. * * Usage: * @@ -130,14 +157,14 @@ class Container3dLimitDist { * int x_min, x_max; * Container3d &container; * public: - * Container3dLimitX(Container3d &_container, int _x_min, int _x_max) + * Container3dLimitX(Container3d &_container, int _x_min, int _x_max) * : container(_container),x_min(_x_min),x_max(_x_max) {} * bool operator()(size_t i1) const { - * if ((container[i1].first.x >= x_min) && (container[i1].first.x <= x_max)) return true; - * return false; + * if ((container[i1].first.x >= x_min) && (container[i1].first.x <= x_max)) + * return true; return false; * } * }; - * + * * ... * Container3d c3d; * c3d.Add(CvPoint3D32f(0,0,0), 0); @@ -165,120 +192,175 @@ class Container3dLimitDist { template class Container3d { - public: - /** \brief \e node_type for storing data. 3D-position is paired with the data content. */ - typedef std::pair node_type; - protected: - /** \brief the actual data in using node_type: pair */ - std::vector data; - /** \brief Possibly limited set of indices for \e data in somehow "optimal" search order */ - std::vector search_space; +public: + /** \brief \e node_type for storing data. 3D-position is paired with the data + * content. 
 */
+  typedef std::pair<CvPoint3D32f, T> node_type;
+
+protected:
+  /** \brief the actual data in using node_type: pair<position, data> */
+  std::vector<node_type> data;
+  /** \brief Possibly limited set of indices for \e data in somehow "optimal"
+   * search order */
+  std::vector<size_t> search_space;
+
+public:
+  /** \brief Add \e _data in the container and associate it with 3D position \e
+   * _pos */
+  void Add(const CvPoint3D32f& _pos, const T& _data)
+  {
+    data.push_back(node_type(_pos, _data));
+    search_space.push_back(data.size() - 1);
+  }
+  /** \brief Clear the container */
+  void Clear()
+  {
+    data.clear();
+    search_space.clear();
+  }
+  /** \brief Reset the search space to contain whole data */
+  void ResetSearchSpace()
+  {
+    search_space.resize(data.size());
+    for (size_t i = 0; i < search_space.size(); i++)
+    {
+      search_space[i] = i;
+    }
+  }
+  /** \brief Erase item in the container */
+  void Erase(size_t index)
+  {
+    typename std::vector<node_type>::iterator iter_d;
+    iter_d = data.begin();
+    for (size_t i = 0; i < index; i++)
+      iter_d++;
+    data.erase(iter_d);
+    ResetSearchSpace();
+  }
+  /** \brief Sort using external Compare method */
+  template <class Compare>
+  int Sort(Compare comp)
+  {
+    stable_sort(search_space.begin(), search_space.end(), comp);
+    return search_space.size();
+  }

-public:
-  /** \brief Add \e _data in the container and associate it with 3D position \e _pos */
-  void Add(const CvPoint3D32f& _pos, const T& _data){
-    data.push_back(node_type(_pos, _data));
-    search_space.push_back(data.size()-1);
-  }
-  /** \brief Clear the container */
-  void Clear() {
-    data.clear();
-    search_space.clear();
-  }
-  /** \brief Reset the search space to contain whole data */
-  void ResetSearchSpace() {
-    search_space.resize(data.size());
-    for (size_t i=0; i<search_space.size(); i++) {
-      search_space[i] = i;
-    }
-  }
-  /** \brief Erase item in the container */
-  void Erase(size_t index) {
-    typename std::vector<node_type>::iterator iter_d;
-    iter_d = data.begin();
-    for (size_t i=0; i<index; i++) iter_d++;
-    data.erase(iter_d);
-    ResetSearchSpace();
-  }
-  /** \brief Sort using external Compare method */
-  template <class Compare>
-  int Sort(Compare comp) {
-    stable_sort(search_space.begin(), search_space.end(), comp);
-    return search_space.size();
-  }

+  /** \brief Limit the search space with external limitation */
+  template <class Test>
+  int Limit(Test test)
+  {
+    std::vector<size_t>::iterator iter;
+    for (iter = search_space.begin(); iter != search_space.end();)
+    {
+      if (!test(*iter))
+      {
+        iter = search_space.erase(iter);
+      }
+      else
+      {
+        iter++;
+      }
+    }
+    return search_space.size();
+  }

-  /** \brief Limit the search space with external limitation */
-  template <class Test>
-  int Limit(Test test) {
-    std::vector<size_t>::iterator iter;
-    for (iter = search_space.begin(); iter != search_space.end();) {
-      if (!test(*iter)) {
-        iter = search_space.erase(iter);
-      } else {
-        iter++;
-      }
-    }
-    return search_space.size();
-  }
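To illustrate how the search space is meant to be used (editor's sketch, not from the patch; `cam_pos` is a placeholder origin): sort with one of the functors defined earlier, then walk the result with the Iterator declared below:

\code
alvar::Container3d<int> c3d;
// ... populate with c3d.Add(position, value) ...
c3d.ResetSearchSpace();
alvar::Container3dSortDist<int> sort_dist(c3d, cam_pos);
c3d.Sort(sort_dist);  // nearest-to-cam_pos entries come first
for (alvar::Container3d<int>::Iterator iter = c3d.begin(); iter != c3d.end();
     ++iter) {
  size_t index = c3d.GetIndex(iter);  // absolute index, usable as c3d[index]
  int& value = iter->second;          // the stored payload
  CvPoint3D32f& pos = iter->first;    // its 3D position
}
\endcode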
+  /** \brief Iterator for going through the items in \e Container3d in the
+   * specified order
+   *
+   * The idea is that the content in \e Container3d can be sorted and limited in
+   * different ways. After sorting/limiting the content the \e iterator (\e
+   * Begin() and \e End() ) can be used for accessing the data items in optimal
+   * order.
+   */
-  /** \brief Iterator for going through the items in \e Container3d in the specified order
-   *
-   * The idea is that the content in \e Container3d can be sorted and limited in different
-   * ways. After sorting/limiting the content the \e iterator (\e Begin() and \e End() ) can
-   * be used for accessing the data items in optimal order.
-   */
-  class Iterator : public std::iterator<std::forward_iterator_tag, node_type>
-  {
-  protected:
-    Container3d *container;
-    std::vector<size_t>::iterator iter;
-  public:
-    Iterator() {}
-    Iterator(Container3d *_container, std::vector<size_t>::iterator _iter) : container(_container), iter(_iter) {}
-    node_type &operator*() const { return container->data[*iter]; }
-    node_type *operator->() const { return &(operator*()); }
-    virtual Iterator& operator++() { ++iter; return *this; }
-    bool operator==(const Iterator& _m) const { return iter == _m.iter; }
-    bool operator!=(const Iterator& _m) const { return iter != _m.iter; }
-    size_t GetIndex() { return *iter; }
-  };
+  class Iterator : public std::iterator<std::forward_iterator_tag, node_type>
+  {
+  protected:
+    Container3d* container;
+    std::vector<size_t>::iterator iter;
+
+  public:
+    Iterator()
+    {
+    }
+    Iterator(Container3d* _container, std::vector<size_t>::iterator _iter)
+      : container(_container), iter(_iter)
+    {
+    }
+    node_type& operator*() const
+    {
+      return container->data[*iter];
+    }
+    node_type* operator->() const
+    {
+      return &(operator*());
+    }
+    virtual Iterator& operator++()
+    {
+      ++iter;
+      return *this;
+    }
+    bool operator==(const Iterator& _m) const
+    {
+      return iter == _m.iter;
+    }
+    bool operator!=(const Iterator& _m) const
+    {
+      return iter != _m.iter;
+    }
+    size_t GetIndex()
+    {
+      return *iter;
+    }
+  };

-  /** \brief Provides an iterator pointing to the beginning of the limited/sorted 3D content */
-  Iterator begin() {
-    return Iterator(this, search_space.begin());
-  }
-
-  /** \brief Provides an iterator pointing to the end of the limited/sorted 3D content */
-  Iterator end() {
-    return Iterator(this, search_space.end());
-  }
-
-  /** \brief Get number of items that can be referenced using \e operator[]() */
-  size_t size() const { return data.size(); }
-
-  /** \brief Get absolute reference usable with \e operator[]() based on the iterator */
-  size_t GetIndex(Iterator &iter) {
-    return iter.GetIndex();
-  }
-
-  /** \brief Instead of \e Iterator we can use also absolute references for data with \e operator[]() */
-  node_type &operator[](size_t index) {
-    return data[index];
-  }
-
-  /** \brief Get absolute reference usable with \e operator[]() based on the content */
-  size_t GetIndex(T *p) {
-    size_t i=0;
-    for (; i<data.size(); i++) {
-      if (p == &(data[i].second)) break;
-    }
-    return i;
-  }
+  /** \brief Provides an iterator pointing to the beginning of the
+   * limited/sorted 3D content */
+  Iterator begin()
+  {
+    return Iterator(this, search_space.begin());
+  }
+
+  /** \brief Provides an iterator pointing to the end of the limited/sorted 3D
+   * content */
+  Iterator end()
+  {
+    return Iterator(this, search_space.end());
+  }
+
+  /** \brief Get number of items that can be referenced using \e operator[]() */
+  size_t size() const
+  {
+    return data.size();
+  }
+
+  /** \brief Get absolute reference usable with \e operator[]() based on the
+   * iterator */
+  size_t GetIndex(Iterator& iter)
+  {
+    return iter.GetIndex();
+  }
+
+  /** \brief Instead of \e Iterator we can use also absolute references for data
+   * with \e operator[]() */
+  node_type& operator[](size_t index)
+  {
+    return data[index];
+  }
+
+  /** \brief Get absolute reference usable with \e operator[]() based on the
+   * content */
+  size_t GetIndex(T* p)
+  {
+    size_t i = 0;
+    for (; i < data.size(); i++)
+    {
+      if (p == &(data[i].second))
+        break;
+    }
+    return i;
+  }
 };

 }  // namespace alvar

 #endif
diff --git a/ar_track_alvar/include/ar_track_alvar/CvTestbed.h b/ar_track_alvar/include/ar_track_alvar/CvTestbed.h
--- a/ar_track_alvar/include/ar_track_alvar/CvTestbed.h
+++ b/ar_track_alvar/include/ar_track_alvar/CvTestbed.h
 #include
 #include
 #include
-#include
+#include
 #include
-#include
-#include
 #include "CaptureFactory.h"

 using namespace alvar;

@@ -19,7 +18,7 @@
  * Usage:
  * \code
  * #include "CvTestbed.h"
- *
+ *
  * void videocallback(IplImage *image)
 * {
 *     static IplImage *img_gray=NULL;
 *
 *     assert(image);
 *     if (img_gray == NULL) {
 *         // Following image is toggled visible using key '0'
- *         img_gray = CvTestbed::Instance().CreateImageWithProto("img_gray", image, 0, 1);
+ *         img_gray = CvTestbed::Instance().CreateImageWithProto("img_gray",
+ *image, 0, 1);
 *     }
 *     cvCvtColor(image, img_gray, CV_RGB2GRAY);
 *     // TODO:
Do your image operations * } * * void main() { - * CvTestbed::Instance().SetVideoCallback(videocallback); // Set video callback - * CvTestbed::Instance().StartVideo("movie.avi"); // Video from avi + * CvTestbed::Instance().SetVideoCallback(videocallback); // Set video + *callback CvTestbed::Instance().StartVideo("movie.avi"); // Video from avi * // CvTestbed::Instance().StartVideo(0) // Video from camera 0 * } * \endcode * * In addition to handling video input from avi or from camera * \e CvTestbed has also functions for creating and showing - * (and automatically releasing) IplImage's. + * (and automatically releasing) IplImage's. * * The \e CvTestbed is made into a simple Meyers singleton: * \code @@ -60,115 +60,134 @@ using namespace alvar; * } * \endcode * - * The only instance of the class is accessed using public + * The only instance of the class is accessed using public * Instance()-interface. All possible constructors and destructors - * are hidden. If more complex singleton approach is needed + * are hidden. If more complex singleton approach is needed * refer to Loki library or the book "Modern C++ Design". */ -class CvTestbed { +class CvTestbed +{ protected: - Capture *cap; - /** \brief Hidden constructor for Singleton */ - CvTestbed(); - /** \brief Hidden copy constructor for Singleton */ - CvTestbed(const CvTestbed&); - /** \brief Hidden copy operator for Singleton */ - CvTestbed& operator=(const CvTestbed&); - /** \brief Hidden destructor for Singleton */ - ~CvTestbed(); - /** \brief Boolean indicating are we still running. We exit from the \e WaitKeys when this is false. */ - bool running; + Capture* cap; + /** \brief Hidden constructor for Singleton */ + CvTestbed(); + /** \brief Hidden copy constructor for Singleton */ + CvTestbed(const CvTestbed&); + /** \brief Hidden copy operator for Singleton */ + CvTestbed& operator=(const CvTestbed&); + /** \brief Hidden destructor for Singleton */ + ~CvTestbed(); + /** \brief Boolean indicating are we still running. We exit from the \e + * WaitKeys when this is false. */ + bool running; + + /** \brief Pointer for the user-defined videocallback. */ + void (*videocallback)(cv::Mat& image); + /** \brief Pointer for the user-defined KEYcallback. */ + int (*keycallback)(int key); + /** \brief The window title for the video view. */ + std::string wintitle; + /** \brief The filename if we are reading an AVI file. */ + std::string filename; + /** \brief Image structure to store the images internally */ + struct Image + { + cv::Mat ipl; + std::string title; + bool visible; + bool release_at_exit; + Image(cv::Mat _ipl, std::string _title, bool _visible, + bool _release_at_exit) + : ipl(std::move(_ipl)) + , title(_title) + , visible(_visible) + , release_at_exit(_release_at_exit) + { + } + }; + /** \brief Vector of images stored internally */ + std::vector images; - /** \brief Pointer for the user-defined videocallback. */ - void (*videocallback)(cv::Mat *image); - /** \brief Pointer for the user-defined KEYcallback. */ - int (*keycallback)(int key); - /** \brief The window title for the video view. */ - std::string wintitle; - /** \brief The filename if we are reading an AVI file. 
*/ - std::string filename; - /** \brief Image structure to store the images internally */ - struct Image { - cv::Mat *ipl; - std::string title; - bool visible; - bool release_at_exit; - Image(cv::Mat *_ipl, std::string _title, bool _visible, bool _release_at_exit) - :ipl(_ipl),title(_title),visible(_visible),release_at_exit(_release_at_exit) {} - }; - /** \brief Vector of images stored internally */ - std::vector images; + /** \brief Video callback called for every frame. This calls user-defined + * videocallback if one exists. */ + static void default_videocallback(cv::Mat& image); + /** \brief \e WaitKeys contains the main loop. */ + void WaitKeys(); + /** \brief \e ShowVisibleImages is called from the videocallback. This shows + * the internally stored images which have the visible-flag on */ + void ShowVisibleImages(); - /** \brief Video callback called for every frame. This calls user-defined videocallback if one exists. */ - static void default_videocallback(cv::Mat*image); - /** \brief \e WaitKeys contains the main loop. */ - void WaitKeys(); - /** \brief \e ShowVisibleImages is called from the videocallback. This shows the internally stored images which have the visible-flag on */ - void ShowVisibleImages(); public: - //CameraDescription camera_description; - /** - * \brief The one and only instance of CvTestbed is accessed using - * \e CvTestbed::Instance() - */ - static CvTestbed& Instance(); - /** - * \brief Set the videocallback function that will be called for - * every frame. - */ - void SetVideoCallback(void (*_videocallback)(cv::Mat *image)); - /** - * \brief Sets the keyboard callback function that will be called - * when keyboard is pressed. - * - * The callback should return 0 if it doesn't want the default keyboard - * actions to be made. By default keys '0'-'9' executes - * \e ToggleImageVisible for first 10 IplImage's, while any other key - * will Exit the program. - */ - void SetKeyCallback(int (*_keycallback)(int key)); - /** - * \brief Start video input from given capture device - * \param cap The capture device. If NULL a default capture device is created. 
- */ - bool StartVideo(Capture *_cap, const char *_wintitle=0/*"Capture"*/); - /** - * \brief Stop video - */ - void StopVideo() { running = false; } - /** - * \brief Sets an existing IplImage to be stored with the given title - * \param title Title for the image - * \param ipl The IplImage to be stored - * \param release_at_exit Boolean indicating should \e CvTestbed automatically release the image at exit - */ - size_t SetImage(const char *title, cv::Mat *ipl, bool release_at_exit=false); - /** - * \brief Creates an image with given size, depth and channels and stores - * it with a given 'title' (see \e CvTestbed::SetImage) - */ - cv::Mat *CreateImage(const char *title, cv::Size size, int type); - /** - * \brief Creates an image based on the given prototype and stores - * it with a given 'title' (see \e CvTestbed::SetImage) - */ - cv::Mat *CreateImageWithProto(const char *title, cv::Mat *proto, int depth=0, int channels=0); - /** - * \brief Get a pointer for the stored image based on index number - */ - cv::Mat *GetImage(size_t index); - /** - * \brief Get an index number of the stored image based on title - */ - size_t GetImageIndex(const char *title); - /** - * \brief Get a pointer for the stored image based on title - */ - cv::Mat *GetImage(const char *title); - /** - * \brief Toggle the visibility of the stored image - */ - bool ToggleImageVisible(size_t index, int flags=1); + // CameraDescription camera_description; + /** + * \brief The one and only instance of CvTestbed is accessed using + * \e CvTestbed::Instance() + */ + static CvTestbed& Instance(); + /** + * \brief Set the videocallback function that will be called for + * every frame. + */ + void SetVideoCallback(void (*_videocallback)(cv::Mat& image)); + /** + * \brief Sets the keyboard callback function that will be called + * when keyboard is pressed. + * + * The callback should return 0 if it doesn't want the default keyboard + * actions to be made. By default keys '0'-'9' executes + * \e ToggleImageVisible for first 10 IplImage's, while any other key + * will Exit the program. + */ + void SetKeyCallback(int (*_keycallback)(int key)); + /** + * \brief Start video input from given capture device + * \param cap The capture device. If NULL a default capture device is created. 
+ */ + bool StartVideo(Capture* _cap, const char* _wintitle = 0 /*"Capture"*/); + /** + * \brief Stop video + */ + void StopVideo() + { + running = false; + } + /** + * \brief Sets an existing IplImage to be stored with the given title + * \param title Title for the image + * \param ipl The IplImage to be stored + * \param release_at_exit Boolean indicating should \e CvTestbed automatically + * release the image at exit + */ + size_t SetImage(const char* title, const cv::Mat& ipl, + bool release_at_exit = false); + /** + * \brief Creates an image with given size, depth and channels and stores + * it with a given 'title' (see \e CvTestbed::SetImage) + */ + cv::Mat CreateImage(const char* title, cv::Size size, int depth, + int channels); + /** + * \brief Creates an image based on the given prototype and stores + * it with a given 'title' (see \e CvTestbed::SetImage) + */ + cv::Mat CreateImageWithProto(const char* title, cv::Mat& proto, int depth = 0, + int channels = 0); + /** + * \brief Get a pointer for the stored image based on index number + */ + cv::Mat GetImage(size_t index); + /** + * \brief Get an index number of the stored image based on title + */ + size_t GetImageIndex(const char* title); + /** + * \brief Get a pointer for the stored image based on title + */ + cv::Mat GetImage(const char* title); + /** + * \brief Toggle the visibility of the stored image + */ + bool ToggleImageVisible(size_t index, int flags = 1); }; #endif diff --git a/ar_track_alvar/include/ar_track_alvar/DirectoryIterator.h b/ar_track_alvar/include/ar_track_alvar/DirectoryIterator.h old mode 100755 new mode 100644 index a56086c..4520d6e --- a/ar_track_alvar/include/ar_track_alvar/DirectoryIterator.h +++ b/ar_track_alvar/include/ar_track_alvar/DirectoryIterator.h @@ -34,57 +34,58 @@ #include -namespace alvar { - +namespace alvar +{ class DirectoryIteratorPrivate; /** * \brief DirectoryIterator for iterating over files and directories. * - * DirectoryIterator class for iterating over files and directories on the filesystem. + * DirectoryIterator class for iterating over files and directories on the + * filesystem. */ class ALVAR_EXPORT DirectoryIterator { public: - /** - * \brief Constructor. - * - * \param path The path on the filesystem to iterate. - */ - DirectoryIterator(const std::string &path); + /** + * \brief Constructor. + * + * \param path The path on the filesystem to iterate. + */ + DirectoryIterator(const std::string& path); - /** - * \brief Destructor. - */ - ~DirectoryIterator(); + /** + * \brief Destructor. + */ + ~DirectoryIterator(); - /** - * \brief Verifies if another entry exist in the directory. - */ - bool hasNext(); + /** + * \brief Verifies if another entry exist in the directory. + */ + bool hasNext(); - /** - * \brief Advances the iterator and returns the name of the next entry. - */ - std::string next(); + /** + * \brief Advances the iterator and returns the name of the next entry. + */ + std::string next(); - /** - * \brief Returns the name of the current entry. - */ - std::string currentEntry(); + /** + * \brief Returns the name of the current entry. + */ + std::string currentEntry(); - /** - * \brief Returns the path of the current entry. - * - * This appends the name of the current entry to the path of the directory that - * is being iterated. - */ - std::string currentPath(); + /** + * \brief Returns the path of the current entry. + * + * This appends the name of the current entry to the path of the directory + * that is being iterated. 
+ */ + std::string currentPath(); private: - DirectoryIteratorPrivate *d; + DirectoryIteratorPrivate* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/DirectoryIterator_private.h b/ar_track_alvar/include/ar_track_alvar/DirectoryIterator_private.h old mode 100755 new mode 100644 index ea3dbf7..ec7009a --- a/ar_track_alvar/include/ar_track_alvar/DirectoryIterator_private.h +++ b/ar_track_alvar/include/ar_track_alvar/DirectoryIterator_private.h @@ -26,25 +26,25 @@ #include -namespace alvar { - +namespace alvar +{ class DirectoryIteratorPrivateData; class DirectoryIteratorPrivate { public: - DirectoryIteratorPrivate(const std::string &path); - ~DirectoryIteratorPrivate(); - bool hasNext(); - std::string next(); - void skip(); - - DirectoryIteratorPrivateData *d; - std::string mDirectory; - std::string mEntry; - bool mValid; + DirectoryIteratorPrivate(const std::string& path); + ~DirectoryIteratorPrivate(); + bool hasNext(); + std::string next(); + void skip(); + + DirectoryIteratorPrivateData* d; + std::string mDirectory; + std::string mEntry; + bool mValid; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Draw.h b/ar_track_alvar/include/ar_track_alvar/Draw.h index 1836915..2122ac8 100644 --- a/ar_track_alvar/include/ar_track_alvar/Draw.h +++ b/ar_track_alvar/include/ar_track_alvar/Draw.h @@ -197,4 +197,4 @@ void ALVAR_EXPORT DrawTexture(cv::Mat& image, cv::Mat& texture, Camera* cam, } // namespace alvar -#endif \ No newline at end of file +#endif diff --git a/ar_track_alvar/include/ar_track_alvar/EC.h b/ar_track_alvar/include/ar_track_alvar/EC.h index b2cb36c..3a6438b 100644 --- a/ar_track_alvar/include/ar_track_alvar/EC.h +++ b/ar_track_alvar/include/ar_track_alvar/EC.h @@ -36,30 +36,32 @@ #include "MarkerDetector.h" #include "MultiMarker.h" -namespace alvar { - +namespace alvar +{ /** \brief Basic structure to be usable with EC methods. * - * You can inherit your own classes/structures from this or make one with similar members. + * You can inherit your own classes/structures from this or make one with + * similar members. * - * The idea is that the EC-versions store the data externally in a map of features that - * are based on the structure \e ExternalContainer . The \e ExternalContainer has certain - * predefined fields that are used/needed by different methods. Below is a summary of - * some main methods for reading (r) and writing (w) in the external container. + * The idea is that the EC-versions store the data externally in a map of + * features that are based on the structure \e ExternalContainer . The \e + * ExternalContainer has certain predefined fields that are used/needed by + * different methods. Below is a summary of some main methods for reading (r) + * and writing (w) in the external container. * - * - * - * - * - * - * - * - * - * - * - * - * - * + *
type_idhas_p2dhas_p3dp2dp3dprojected_p2d
TrackerFeaturesEC::Track(r)w
+ * + * + * + * + * + * + * + * + * + * + * + * * * * @@ -67,8 +69,8 @@ namespace alvar { * * * - * - * + * + * * * * @@ -76,8 +78,8 @@ namespace alvar { * * * - * - * + * + * * * * @@ -85,8 +87,8 @@ namespace alvar { * * * - * - * + * + * * * * @@ -94,8 +96,8 @@ namespace alvar { * * * - * - * + * + * * * * @@ -103,8 +105,8 @@ namespace alvar { * * * - * - * + * + * * * * @@ -112,8 +114,8 @@ namespace alvar { * * * - * - * + * + * * * * @@ -121,8 +123,8 @@ namespace alvar { * * * - * - * + * + * * * * @@ -130,8 +132,8 @@ namespace alvar { * * * - * - * + * + * * * * @@ -139,670 +141,899 @@ namespace alvar { * * * - * - * + * + * * * * * * * - *
type_idhas_p2dhas_p3dp2dp3dprojected_p2d
TrackerFeaturesEC::Track(r)wrwrw
CameraEC::Undistort(r)CameraEC::Undistort(r)rwrw
CameraEC::Distort(r)CameraEC::Distort(r)rwrw
CameraEC::CalcExteriorOrientation(r)CameraEC::CalcExteriorOrientation(r)rrr
CameraEC::UpdatePose(r)CameraEC::UpdatePose(r)rrr
CameraEC::Reproject(r)CameraEC::Reproject(r)(r)(r)rw
CameraEC::EraseUsingReprojectionError(r)CameraEC::EraseUsingReprojectionError(r)(r)(r)r
MarkerDetectorEC::DetectrwMarkerDetectorEC::Detectrwww
MarkerDetectorEC::MarkerToECwMarkerDetectorEC::MarkerToECww
MultiMarkerEC::MarkersToECwMultiMarkerEC::MarkersToECwww
+ * </table>
 */
-class ExternalContainer {
+class ExternalContainer
+{
 public:
-    int type_id;
-    bool has_p2d;
-    bool has_p3d;
-    CvPoint2D32f p2d;
-    CvPoint3D32f p3d;
-    CvPoint2D32f projected_p2d; // This is only temporary -- user needs to take care that it has valid content
-    ExternalContainer() : type_id(-1), has_p2d(false), has_p3d(false) {}
-    ExternalContainer(const ExternalContainer &c) {
-        type_id = c.type_id;
-        has_p2d = c.has_p2d;
-        has_p3d = c.has_p3d;
-        p2d.x = c.p2d.x;
-        p2d.y = c.p2d.y;
-        p3d.x = c.p3d.x;
-        p3d.y = c.p3d.y;
-        p3d.z = c.p3d.z;
-        projected_p2d.x = c.projected_p2d.x;
-        projected_p2d.y = c.projected_p2d.y;
-    }
+  int type_id;
+  bool has_p2d;
+  bool has_p3d;
+  CvPoint2D32f p2d;
+  CvPoint3D32f p3d;
+  CvPoint2D32f projected_p2d;  // This is only temporary -- user needs to take
+                               // care that it has valid content
+  ExternalContainer() : type_id(-1), has_p2d(false), has_p3d(false)
+  {
+  }
+  ExternalContainer(const ExternalContainer& c)
+  {
+    type_id = c.type_id;
+    has_p2d = c.has_p2d;
+    has_p3d = c.has_p3d;
+    p2d.x = c.p2d.x;
+    p2d.y = c.p2d.y;
+    p3d.x = c.p3d.x;
+    p3d.y = c.p3d.y;
+    p3d.z = c.p3d.z;
+    projected_p2d.x = c.projected_p2d.x;
+    projected_p2d.y = c.projected_p2d.y;
+  }
 };

-/** \brief This is a default functor for testing which items in the container should be handled by each method
+/** \brief This is a default functor for testing which items in the container
+ * should be handled by each method
 *
- * By default this only tests that the only the \e type_id. If you specify \e type_id that is not -1
- * then the functor tests that the item's \e type_id matches. Naturally you can make whatever tests
- * in your own functors.
+ * By default this only tests the \e type_id. If you specify \e
+ * type_id that is not -1 then the functor tests that the item's \e type_id
+ * matches. Naturally you can make whatever tests in your own functors.
 */
-template<class T>
-class DoHandleTest {
+template <class T>
+class DoHandleTest
+{
 protected:
-    int type_id;
-    bool needs_has_p2d;
-    bool needs_has_p3d;
+  int type_id;
+  bool needs_has_p2d;
+  bool needs_has_p3d;
+
 public:
-    DoHandleTest(int _type_id=-1, bool _needs_has_p2d=false, bool _needs_has_p3d=false)
-        : type_id(_type_id), needs_has_p2d(_needs_has_p2d), needs_has_p3d(_needs_has_p3d) {}
-    virtual bool operator()(const T &f) const {
-        if (needs_has_p2d && !f.has_p2d) return false;
-        if (needs_has_p3d && !f.has_p3d) return false;
-        // if (f.type_id == -1) return false; // Unnecessary?
-        if ((type_id != -1) && (type_id != f.type_id)) return false;
-        return true;
-    }
+  DoHandleTest(int _type_id = -1, bool _needs_has_p2d = false,
+               bool _needs_has_p3d = false)
+    : type_id(_type_id)
+    , needs_has_p2d(_needs_has_p2d)
+    , needs_has_p3d(_needs_has_p3d)
+  {
+  }
+  virtual bool operator()(const T& f) const
+  {
+    if (needs_has_p2d && !f.has_p2d)
+      return false;
+    if (needs_has_p3d && !f.has_p3d)
+      return false;
+    // if (f.type_id == -1) return false; // Unnecessary?
+    if ((type_id != -1) && (type_id != f.type_id))
+      return false;
+    return true;
+  }
 };

-/** \brief This is default functor for testing which items in the container should be erased
+/** \brief This is a default functor for testing which items in the container
+ * should be erased
 *
- * By default this can test for \e has_p2d and \e has_p3d . Furthermore, it can test for
- * reprojection error when \e Pose , \e Camera and limit are given.
+ * By default this can test for \e has_p2d and \e has_p3d . 
Furthermore, it can + * test for reprojection error when \e Pose , \e Camera and limit are given. */ -template -class DoEraseTest { +template +class DoEraseTest +{ protected: - int type_id; - bool erase_without_p2d; - bool erase_without_p3d; - bool test_reprojection; - float limit_sq; + int type_id; + bool erase_without_p2d; + bool erase_without_p3d; + bool test_reprojection; + float limit_sq; + public: - DoEraseTest(bool _erase_without_p2d, bool _erase_without_p3d, int _type_id=-1) - : type_id(_type_id), erase_without_p2d(_erase_without_p2d), erase_without_p3d(_erase_without_p3d), - test_reprojection(false), limit_sq(0.f) {} - DoEraseTest(float _limit, bool _erase_without_p2d=false, bool _erase_without_p3d=false, int _type_id=-1) - : type_id(_type_id), erase_without_p2d(_erase_without_p2d), erase_without_p3d(_erase_without_p3d), - test_reprojection(true), limit_sq(_limit*_limit) {} - virtual bool operator()(const T &f) const { - if ((type_id != -1) && (type_id != f.type_id)) return false; - if (erase_without_p2d && !f.has_p2d) return true; - if (erase_without_p3d && !f.has_p3d) return true; - if (test_reprojection) { - if (!f.has_p2d) return false; - if (!f.has_p3d) return false; - // Note that the projected_p2d needs to have valid content at this point - double dist_sq = (f.p2d.x-f.projected_p2d.x)*(f.p2d.x-f.projected_p2d.x); - dist_sq += (f.p2d.y-f.projected_p2d.y)*(f.p2d.y-f.projected_p2d.y); - if (dist_sq > limit_sq) - return true; - } - return false; - } + DoEraseTest(bool _erase_without_p2d, bool _erase_without_p3d, + int _type_id = -1) + : type_id(_type_id) + , erase_without_p2d(_erase_without_p2d) + , erase_without_p3d(_erase_without_p3d) + , test_reprojection(false) + , limit_sq(0.f) + { + } + DoEraseTest(float _limit, bool _erase_without_p2d = false, + bool _erase_without_p3d = false, int _type_id = -1) + : type_id(_type_id) + , erase_without_p2d(_erase_without_p2d) + , erase_without_p3d(_erase_without_p3d) + , test_reprojection(true) + , limit_sq(_limit * _limit) + { + } + virtual bool operator()(const T& f) const + { + if ((type_id != -1) && (type_id != f.type_id)) + return false; + if (erase_without_p2d && !f.has_p2d) + return true; + if (erase_without_p3d && !f.has_p3d) + return true; + if (test_reprojection) + { + if (!f.has_p2d) + return false; + if (!f.has_p3d) + return false; + // Note that the projected_p2d needs to have valid content at this point + double dist_sq = + (f.p2d.x - f.projected_p2d.x) * (f.p2d.x - f.projected_p2d.x); + dist_sq += (f.p2d.y - f.projected_p2d.y) * (f.p2d.y - f.projected_p2d.y); + if (dist_sq > limit_sq) + return true; + } + return false; + } }; /** \brief Erasing items from container using \e DoEraseTest type functor */ -template -inline int EraseItemsEC(std::map &container, F do_erase_test) { - int count=0; - typename std::map::iterator iter = container.begin(); - while(iter != container.end()) { - T &f = iter->second; - if (do_erase_test(f)) { - container.erase(iter++); - count++; - } else iter++; - } - return count; +template +inline int EraseItemsEC(std::map& container, F do_erase_test) +{ + int count = 0; + typename std::map::iterator iter = container.begin(); + while (iter != container.end()) + { + T& f = iter->second; + if (do_erase_test(f)) + { + container.erase(iter++); + count++; + } + else + iter++; + } + return count; } /** \brief Version of \e TrackerFeatures using external container */ -class TrackerFeaturesEC : public TrackerFeatures { +class TrackerFeaturesEC : public TrackerFeatures +{ protected: - bool purge; + bool purge; 
+ public: - /** \brief Constructor */ - TrackerFeaturesEC(int _max_features=100, int _min_features=90, double _quality_level=0.01, double _min_distance=10, int _pyr_levels=4, int win_size=6) - : TrackerFeatures(_max_features, _min_features, _quality_level, _min_distance, _pyr_levels, win_size), purge(false) {} - - /** \brief Track features with matching type id. New features will have id's in the specified id range. */ - template - bool Track(IplImage *img, IplImage *mask, std::map &container, int type_id=-1, int first_id=-1, int last_id=-1) - { - DoHandleTest do_handle_test_default(type_id); - if (type_id == -1) type_id=0; - return Track(img, mask, container, do_handle_test_default, type_id, first_id, last_id); - } - - /** \brief Track features matching the given functor. New features will have id's in the specified id range. */ - /*template - bool Track(IplImage *img, IplImage *mask, std::map &container, F do_handle_test, int type_id=0, int first_id=0, int last_id=65535) - { - // Update features to match the ones in the given container - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - feature_count = 0; - for (;iter != iter_end; iter++) { - T &f = iter->second; - if (!do_handle_test(f)) continue; - if (f.has_p2d != true) continue; - f.has_p2d = false; // This is updated again to true if tracking succeeds - features[feature_count] = f.p2d; - ids[feature_count] = iter->first; - feature_count++; - if (feature_count == max_features) break; - } - // Check that next_id is ok - if (next_id < first_id) next_id = first_id; - if (next_id > last_id) return false; // TODO: Make some better solution for this - // Purge if needed - if (purge) { - TrackerFeatures::Purge(); - purge=false; - } - // Track as usual (this will swap above features to prev_features) - TrackHid(img, mask); - // Update the container to have the updated features - for (int i=0; i= last_id) break; - T &f = container[id]; - f.type_id = type_id; - f.has_p2d = true; - f.p2d = features[i]; - } - return true; - }*/ - - /** \brief Track features matching the given functor. If first_id >= 0 we call \e AddFeatures with the specified id range. */ - template - bool Track(IplImage *img, IplImage *mask, std::map &container, F do_handle_test, int type_id=0, int first_id=-1, int last_id=-1) - { - // When first_id && last_id are < 0 then we don't add new features... 
- if (first_id < 0) last_id = -1; - // Update features to match the ones in the given container - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - feature_count = 0; - for (;iter != iter_end; iter++) { - T &f = iter->second; - if (!do_handle_test(f)) continue; - if (f.has_p2d != true) continue; - f.has_p2d = false; // This is updated again to true if tracking succeeds - features[feature_count] = f.p2d; - ids[feature_count] = iter->first; - feature_count++; - if (feature_count == max_features) break; - } - // Purge if needed - if (purge) { - TrackerFeatures::Purge(); - purge=false; - } - if (first_id < 0) { - // Track as usual (this will swap above features to prev_features) - TrackHid(img, mask, false); - } else { - // Check that next_id is ok - if (next_id < first_id) next_id = first_id; - if (next_id > last_id) return false; // TODO: Make some better solution for this - // Track as usual (this will swap above features to prev_features) - TrackHid(img, mask, true); - } - // Update the container to have the updated features - for (int i=0; i= 0 && id >= last_id) break; - T &f = container[id]; - f.type_id = type_id; - f.has_p2d = true; - f.p2d = features[i]; - } - return true; - } - - /** \brief add features to the previously tracked frame if there are less than min_features */ - template - bool AddFeatures(std::map &container, int type_id=0, int first_id=0, int last_id=65535) - { - if (TrackerFeatures::AddFeatures() == 0) return false; - // Update the container to have the updated features - for (int i=0; i= 0 && id >= last_id) break; - T &f = container[id]; - f.type_id = type_id; - f.has_p2d = true; - f.p2d = features[i]; - } - return true; - } - - /** \brief Erases the items matching with \e type_id having \e has_p2d == false . If \e type_id == -1 doesn't test the type. */ - template - int EraseNonTracked(std::map &container, int type_id=-1) - { - DoEraseTest do_erase_test(true, false, type_id); - return EraseItemsEC(container, do_erase_test); - } - /** \brief Purge features that are considerably closer than the defined min_distance. - * - * Note, that we always try to maintain the smaller id's assuming that they are older ones - */ - void Purge() { purge=true; } - void Reset() { throw alvar::AlvarException("Method not supported"); } - double Reset(IplImage *img, IplImage *mask) { throw alvar::AlvarException("Method not supported"); } - bool DelFeature(int index) { throw alvar::AlvarException("Method not supported"); } - bool DelFeatureId(int id) { throw alvar::AlvarException("Method not supported"); } + /** \brief Constructor */ + TrackerFeaturesEC(int _max_features = 100, int _min_features = 90, + double _quality_level = 0.01, double _min_distance = 10, + int _pyr_levels = 4, int win_size = 6) + : TrackerFeatures(_max_features, _min_features, _quality_level, + _min_distance, _pyr_levels, win_size) + , purge(false) + { + } + + /** \brief Track features with matching type id. New features will have id's + * in the specified id range. */ + template + bool Track(IplImage* img, IplImage* mask, std::map& container, + int type_id = -1, int first_id = -1, int last_id = -1) + { + DoHandleTest do_handle_test_default(type_id); + if (type_id == -1) + type_id = 0; + return Track(img, mask, container, do_handle_test_default, type_id, + first_id, last_id); + } + + /** \brief Track features matching the given functor. New features will have + * id's in the specified id range. 
*/ + /*template + bool Track(IplImage *img, IplImage *mask, std::map &container, F + do_handle_test, int type_id=0, int first_id=0, int last_id=65535) + { + // Update features to match the ones in the given container + typename std::map::iterator iter = container.begin(); + typename std::map::iterator iter_end = container.end(); + feature_count = 0; + for (;iter != iter_end; iter++) { + T &f = iter->second; + if (!do_handle_test(f)) continue; + if (f.has_p2d != true) continue; + f.has_p2d = false; // This is updated again to true if tracking succeeds + features[feature_count] = f.p2d; + ids[feature_count] = iter->first; + feature_count++; + if (feature_count == max_features) break; + } + // Check that next_id is ok + if (next_id < first_id) next_id = first_id; + if (next_id > last_id) return false; // TODO: Make some better solution for + this + // Purge if needed + if (purge) { + TrackerFeatures::Purge(); + purge=false; + } + // Track as usual (this will swap above features to prev_features) + TrackHid(img, mask); + // Update the container to have the updated features + for (int i=0; i= last_id) break; + T &f = container[id]; + f.type_id = type_id; + f.has_p2d = true; + f.p2d = features[i]; + } + return true; + }*/ + + /** \brief Track features matching the given functor. If first_id >= 0 we call + * \e AddFeatures with the specified id range. */ + template + bool Track(IplImage* img, IplImage* mask, std::map& container, + F do_handle_test, int type_id = 0, int first_id = -1, + int last_id = -1) + { + // When first_id && last_id are < 0 then we don't add new features... + if (first_id < 0) + last_id = -1; + // Update features to match the ones in the given container + typename std::map::iterator iter = container.begin(); + typename std::map::iterator iter_end = container.end(); + feature_count = 0; + for (; iter != iter_end; iter++) + { + T& f = iter->second; + if (!do_handle_test(f)) + continue; + if (f.has_p2d != true) + continue; + f.has_p2d = false; // This is updated again to true if tracking succeeds + features[feature_count] = f.p2d; + ids[feature_count] = iter->first; + feature_count++; + if (feature_count == max_features) + break; + } + // Purge if needed + if (purge) + { + TrackerFeatures::Purge(); + purge = false; + } + if (first_id < 0) + { + // Track as usual (this will swap above features to prev_features) + TrackHid(img, mask, false); + } + else + { + // Check that next_id is ok + if (next_id < first_id) + next_id = first_id; + if (next_id > last_id) + return false; // TODO: Make some better solution for this + // Track as usual (this will swap above features to prev_features) + TrackHid(img, mask, true); + } + // Update the container to have the updated features + for (int i = 0; i < feature_count; i++) + { + int id = ids[i]; + if (last_id >= 0 && id >= last_id) + break; + T& f = container[id]; + f.type_id = type_id; + f.has_p2d = true; + f.p2d = features[i]; + } + return true; + } + + /** \brief add features to the previously tracked frame if there are less than + * min_features */ + template + bool AddFeatures(std::map& container, int type_id = 0, + int first_id = 0, int last_id = 65535) + { + if (TrackerFeatures::AddFeatures() == 0) + return false; + // Update the container to have the updated features + for (int i = 0; i < feature_count; i++) + { + int id = ids[i]; + if (last_id >= 0 && id >= last_id) + break; + T& f = container[id]; + f.type_id = type_id; + f.has_p2d = true; + f.p2d = features[i]; + } + return true; + } + + /** \brief Erases the items matching with \e 
type_id having \e has_p2d == + * false . If \e type_id == -1 doesn't test the type. */ + template + int EraseNonTracked(std::map& container, int type_id = -1) + { + DoEraseTest do_erase_test(true, false, type_id); + return EraseItemsEC(container, do_erase_test); + } + /** \brief Purge features that are considerably closer than the defined + * min_distance. + * + * Note, that we always try to maintain the smaller id's assuming that they + * are older ones + */ + void Purge() + { + purge = true; + } + void Reset() + { + throw alvar::AlvarException("Method not supported"); + } + double Reset(IplImage* img, IplImage* mask) + { + throw alvar::AlvarException("Method not supported"); + } + bool DelFeature(int index) + { + throw alvar::AlvarException("Method not supported"); + } + bool DelFeatureId(int id) + { + throw alvar::AlvarException("Method not supported"); + } }; /** \brief Version of \e Camera using external container */ -class ALVAR_EXPORT CameraEC : public Camera { +class ALVAR_EXPORT CameraEC : public Camera +{ public: - /** \brief Undistort the items with matching \e type_id */ - template - void Undistort(std::map &container, int type_id=-1) { - DoHandleTest do_handle_test_default(type_id); - return Undistort(container, do_handle_test_default); - } - /** \brief Undistort the items matching the given functor */ - template - void Undistort(std::map &container, F &do_handle_test) { - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - for (;iter != iter_end; iter++) { - T &f = iter->second; - if (!do_handle(f)) continue; - if (f.has_p2d) Undistort(f.p2d); - } - } - /** \brief Distort the items with matching \e type_id */ - template - void Distort(std::map &container, int type_id=-1) { - DoHandleTest do_handle_test_default(type_id); - return Distort(container, do_handle_test_default); - } - /** \brief Distort the items matching the given functor */ - template - void Distort(std::map &container, F &do_handle_test) { - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - for (;iter != iter_end; iter++) { - T &f = iter->second; - if (!do_handle(f)) continue; - if (f.has_p2d) Distort(f.p2d); - } - } - /** \brief Calculate the \e pose using the items with matching \e type_id */ - template - bool CalcExteriorOrientation(std::map &container, Pose *pose, int type_id=-1) { - DoHandleTest do_handle_test_default(type_id); - return CalcExteriorOrientation(container, pose, do_handle_test_default); - } - /** \brief Calculate the \e pose using the items matching the given functor */ - template - bool CalcExteriorOrientation(std::map &container, Pose *pose, F do_handle_test) { - int count_points = container.size(); - if(count_points < 4) return false; - CvMat* world_points = cvCreateMat(count_points, 1, CV_32FC3); - CvMat* image_observations = cvCreateMat(count_points*2, 1, CV_32FC2); - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - int ind = 0; - for (;iter != iter_end; iter++) { - T &f = iter->second; - if (!do_handle_test(f)) continue; - if (f.has_p2d && f.has_p3d) { - world_points->data.fl[ind*3+0] = (float)f.p3d.x; - world_points->data.fl[ind*3+1] = (float)f.p3d.y; - world_points->data.fl[ind*3+2] = (float)f.p3d.z; - image_observations->data.fl[ind*2+0] = (float)f.p2d.x; - image_observations->data.fl[ind*2+1] = (float)f.p2d.y; - ind++; - } - } - if (ind<4) return false; - - CvRect r; r.x = 0; r.y = 0; r.height = 
ind; r.width = 1; - CvMat world_points_sub; - cvGetSubRect(world_points, &world_points_sub, r); - CvMat image_observations_sub; - cvGetSubRect(image_observations, &image_observations_sub, r); - - bool ret = Camera::CalcExteriorOrientation(&world_points_sub, &image_observations_sub, pose); - - cvReleaseMat(&image_observations); - cvReleaseMat(&world_points); - - return ret; - } - /** \brief Update the \e pose using the items with matching \e type_id */ - template - bool UpdatePose(std::map &container, Pose *pose, int type_id=-1, std::map *weights=0) { - DoHandleTest do_handle_test_default(type_id); - return UpdatePose(container, pose, do_handle_test_default, weights); - } - /** \brief Update the rotation in \e pose using the items with matching \e type_id */ - template - bool UpdateRotation(std::map &container, Pose *pose, int type_id=-1) { - DoHandleTest do_handle_test_default(type_id); - return UpdateRotation(container, pose, do_handle_test_default); - } - /** \brief Update the rotation in \e pose using the items matching the given functor */ - template - bool UpdateRotation(std::map &container, Pose *pose, F do_handle_test) { - int count_points = container.size(); - if(count_points < 6) return false; - - CvMat* world_points = cvCreateMat(count_points, 1, CV_64FC3); - CvMat* image_observations = cvCreateMat(count_points*2, 1, CV_64F); // [u v u v u v ...]' - - int ind = 0; - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - for (;iter != iter_end; iter++) { - T &f = iter->second; - if (!do_handle_test(f)) continue; - if (f.has_p2d && f.has_p3d) { - world_points->data.db[ind*3+0] = f.p3d.x; - world_points->data.db[ind*3+1] = f.p3d.y; - world_points->data.db[ind*3+2] = f.p3d.z; - image_observations->data.db[ind*2+0] = f.p2d.x; - image_observations->data.db[ind*2+1] = f.p2d.y; - ind++; - } - } - if (ind < 6) return false; - - CvRect r; r.x = 0; r.y = 0; r.height = ind; r.width = 1; - CvMat world_points_sub; - cvGetSubRect(world_points, &world_points_sub, r); - - r.height = 2*ind; - CvMat image_observations_sub; - cvGetSubRect(image_observations, &image_observations_sub, r); - - bool ret = UpdateRotation(&world_points_sub, &image_observations_sub, pose); - - cvReleaseMat(&image_observations); - cvReleaseMat(&world_points); - - return ret; - } - /** \brief Update the \e pose using the items matching the given functor */ - template - bool UpdatePose(std::map &container, Pose *pose, F do_handle_test, std::map *weights=0) { - int count_points = container.size(); - if(count_points < 4) return false; // Note, UpdatePose calls CalcExteriorOrientation if 4 or 5 points - - CvMat* world_points = cvCreateMat(count_points, 1, CV_64FC3); - CvMat* image_observations = cvCreateMat(count_points*2, 1, CV_64F); // [u v u v u v ...]' - CvMat* w = 0; - if(weights) w = cvCreateMat(count_points*2, 1, CV_64F); - - int ind = 0; - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - for (;iter != iter_end; iter++) { - T &f = iter->second; - if (!do_handle_test(f)) continue; - if (f.has_p2d && f.has_p3d) { - world_points->data.db[ind*3+0] = f.p3d.x; - world_points->data.db[ind*3+1] = f.p3d.y; - world_points->data.db[ind*3+2] = f.p3d.z; - image_observations->data.db[ind*2+0] = f.p2d.x; - image_observations->data.db[ind*2+1] = f.p2d.y; - - if(weights) - w->data.db[ind*2+1] = w->data.db[ind*2+0] = (*weights)[iter->first]; - ind++; - } - } - if (ind < 4) return false; // Note, UpdatePose calls 
CalcExteriorOrientation if 4 or 5 points - - CvRect r; r.x = 0; r.y = 0; r.height = ind; r.width = 1; - CvMat world_points_sub; - cvGetSubRect(world_points, &world_points_sub, r); - - r.height = 2*ind; - CvMat image_observations_sub; - cvGetSubRect(image_observations, &image_observations_sub, r); - - bool ret; - if (weights) { - CvMat w_sub; - cvGetSubRect(w, &w_sub, r); - ret = UpdatePose(&world_points_sub, &image_observations_sub, pose, &w_sub); - } - else - ret = UpdatePose(&world_points_sub, &image_observations_sub, pose); - - cvReleaseMat(&image_observations); - cvReleaseMat(&world_points); - if(w) cvReleaseMat(&w); - - return ret; - } - - /** \brief Projects \e p3d in the items matching with \e type_id into \e projected_p2d . If \e type_id == -1 doesn't test the type. */ - template - float Reproject(std::map &container, Pose *pose, int type_id=-1, bool needs_has_p2d=false, bool needs_has_p3d=false, float average_outlier_limit=0.f) - { - DoHandleTest do_handle_test(type_id, needs_has_p2d, needs_has_p3d); - return Reproject(container, pose, do_handle_test, average_outlier_limit); - } - /** \brief Projects \e p3d in the items matching with the given functor */ - template - float Reproject(std::map &container, Pose *pose, F do_handle_test, float average_outlier_limit=0.f) - { - float reprojection_sum=0.f; - int reprojection_count=0; - float limit_sq = average_outlier_limit*average_outlier_limit; - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - for(;iter != iter_end;iter++) { - T &f = iter->second; - if (!do_handle_test(f)) continue; - Camera::ProjectPoint(iter->second.p3d, pose, iter->second.projected_p2d); - Camera::ProjectPoint(iter->second.p3d_sh, pose, iter->second.projected_p2d_sh); - - // TODO: Now this is calculated in several places (should this distance be included in ExternalContainer structure? - float dist_sq = (f.p2d.x-f.projected_p2d.x)*(f.p2d.x-f.projected_p2d.x); - dist_sq += (f.p2d.y-f.projected_p2d.y)*(f.p2d.y-f.projected_p2d.y); - if ((limit_sq == 0) || (dist_sq < limit_sq)) { - reprojection_sum += sqrt(dist_sq); - reprojection_count++; - } - } - if (reprojection_count == 0) return 0.f; - return reprojection_sum/reprojection_count; - } - - /** \brief Erases the items matching with \e type_id having large reprojection error. - * If \e type_id == -1 doesn't test the type. - * If \e pose is given calls \e Reproject() internally -- otherwise assumes it has been called beforehands. 
- */
-    template <class T>
-    int EraseUsingReprojectionError(std::map<int,T> &container, float reprojection_error_limit, int type_id=-1, Pose *pose = 0)
-    {
-        if (pose) Reproject(container, pose, type_id, false, false);
-        DoEraseTest<T> do_erase_test(reprojection_error_limit, false, false, type_id) ;
-        return EraseItemsEC(container, do_erase_test);
-    }
-
-    /** \brief Update pose rotations based on new observations */
-    bool UpdateRotation(const CvMat* object_points, CvMat* image_points, Pose *pose);
-
-    /** \brief Update pose rotations (in rodriques&tra) based on new observations */
-    bool UpdateRotation(const CvMat* object_points, CvMat* image_points, CvMat *rot, CvMat *tra);
-
-    /** \brief Update existing pose based on new observations */
-    bool UpdatePose(const CvMat* object_points, CvMat* image_points, Pose *pose, CvMat *weights=0);
-
-    /** \brief Update existing pose (in rodriques&tra) based on new observations */
-    bool UpdatePose(const CvMat* object_points, CvMat* image_points, CvMat *rodriques, CvMat *tra, CvMat *weights=0);
-
-    /** \brief Reconstruct 3D point using two camera poses and corresponding undistorted image feature positions */
-    bool ReconstructFeature(const Pose *pose1, const Pose *pose2, const CvPoint2D32f *u1, const CvPoint2D32f *u2, CvPoint3D32f *p3d, double limit);
-
-    /** \brief Get 3D-coordinate for 2D feature on the plane defined by the pose (z == 0) */
-    void Get3dOnPlane(const Pose *pose, CvPoint2D32f p2d, CvPoint3D32f &p3d);
-
-    /** \brief Get 3D-coordinate for 2D feature assuming certain depth */
-    void Get3dOnDepth(const Pose *pose, CvPoint2D32f p2d, float depth, CvPoint3D32f &p3d);
+  /** \brief Undistort the items with matching \e type_id */
+  template <class T>
+  void Undistort(std::map<int, T>& container, int type_id = -1)
+  {
+    DoHandleTest<T> do_handle_test_default(type_id);
+    return Undistort(container, do_handle_test_default);
+  }
+  /** \brief Undistort the items matching the given functor */
+  template <class T, class F>
+  void Undistort(std::map<int, T>& container, F& do_handle_test)
+  {
+    typename std::map<int, T>::iterator iter = container.begin();
+    typename std::map<int, T>::iterator iter_end = container.end();
+    for (; iter != iter_end; iter++)
+    {
+      T& f = iter->second;
+      if (!do_handle_test(f))
+        continue;
+      if (f.has_p2d)
+        Undistort(f.p2d);
+    }
+  }
+  /** \brief Distort the items with matching \e type_id */
+  template <class T>
+  void Distort(std::map<int, T>& container, int type_id = -1)
+  {
+    DoHandleTest<T> do_handle_test_default(type_id);
+    return Distort(container, do_handle_test_default);
+  }
+  /** \brief Distort the items matching the given functor */
+  template <class T, class F>
+  void Distort(std::map<int, T>& container, F& do_handle_test)
+  {
+    typename std::map<int, T>::iterator iter = container.begin();
+    typename std::map<int, T>::iterator iter_end = container.end();
+    for (; iter != iter_end; iter++)
+    {
+      T& f = iter->second;
+      if (!do_handle_test(f))
+        continue;
+      if (f.has_p2d)
+        Distort(f.p2d);
+    }
+  }
+  /** \brief Calculate the \e pose using the items with matching \e type_id */
+  template <class T>
+  bool CalcExteriorOrientation(std::map<int, T>& container, Pose* pose,
+                               int type_id = -1)
+  {
+    DoHandleTest<T> do_handle_test_default(type_id);
+    return CalcExteriorOrientation(container, pose, do_handle_test_default);
+  }
+  /** \brief Calculate the \e pose using the items matching the given functor */
+  template <class T, class F>
+  bool CalcExteriorOrientation(std::map<int, T>& container, Pose* pose,
+                               F do_handle_test)
+  {
+    int count_points = container.size();
+    if (count_points < 4)
+      return false;
+    CvMat* world_points = cvCreateMat(count_points, 1, CV_32FC3);
+    CvMat* image_observations = cvCreateMat(count_points * 2, 
1, CV_32FC2); + typename std::map::iterator iter = container.begin(); + typename std::map::iterator iter_end = container.end(); + int ind = 0; + for (; iter != iter_end; iter++) + { + T& f = iter->second; + if (!do_handle_test(f)) + continue; + if (f.has_p2d && f.has_p3d) + { + world_points->data.fl[ind * 3 + 0] = (float)f.p3d.x; + world_points->data.fl[ind * 3 + 1] = (float)f.p3d.y; + world_points->data.fl[ind * 3 + 2] = (float)f.p3d.z; + image_observations->data.fl[ind * 2 + 0] = (float)f.p2d.x; + image_observations->data.fl[ind * 2 + 1] = (float)f.p2d.y; + ind++; + } + } + if (ind < 4) + return false; + + CvRect r; + r.x = 0; + r.y = 0; + r.height = ind; + r.width = 1; + CvMat world_points_sub; + cvGetSubRect(world_points, &world_points_sub, r); + CvMat image_observations_sub; + cvGetSubRect(image_observations, &image_observations_sub, r); + + bool ret = Camera::CalcExteriorOrientation(&world_points_sub, + &image_observations_sub, pose); + + cvReleaseMat(&image_observations); + cvReleaseMat(&world_points); + + return ret; + } + /** \brief Update the \e pose using the items with matching \e type_id */ + template + bool UpdatePose(std::map& container, Pose* pose, int type_id = -1, + std::map* weights = 0) + { + DoHandleTest do_handle_test_default(type_id); + return UpdatePose(container, pose, do_handle_test_default, weights); + } + /** \brief Update the rotation in \e pose using the items with matching \e + * type_id */ + template + bool UpdateRotation(std::map& container, Pose* pose, int type_id = -1) + { + DoHandleTest do_handle_test_default(type_id); + return UpdateRotation(container, pose, do_handle_test_default); + } + /** \brief Update the rotation in \e pose using the items matching the given + * functor */ + template + bool UpdateRotation(std::map& container, Pose* pose, F do_handle_test) + { + int count_points = container.size(); + if (count_points < 6) + return false; + + CvMat* world_points = cvCreateMat(count_points, 1, CV_64FC3); + CvMat* image_observations = + cvCreateMat(count_points * 2, 1, CV_64F); // [u v u v u v ...]' + + int ind = 0; + typename std::map::iterator iter = container.begin(); + typename std::map::iterator iter_end = container.end(); + for (; iter != iter_end; iter++) + { + T& f = iter->second; + if (!do_handle_test(f)) + continue; + if (f.has_p2d && f.has_p3d) + { + world_points->data.db[ind * 3 + 0] = f.p3d.x; + world_points->data.db[ind * 3 + 1] = f.p3d.y; + world_points->data.db[ind * 3 + 2] = f.p3d.z; + image_observations->data.db[ind * 2 + 0] = f.p2d.x; + image_observations->data.db[ind * 2 + 1] = f.p2d.y; + ind++; + } + } + if (ind < 6) + return false; + + CvRect r; + r.x = 0; + r.y = 0; + r.height = ind; + r.width = 1; + CvMat world_points_sub; + cvGetSubRect(world_points, &world_points_sub, r); + + r.height = 2 * ind; + CvMat image_observations_sub; + cvGetSubRect(image_observations, &image_observations_sub, r); + + bool ret = UpdateRotation(&world_points_sub, &image_observations_sub, pose); + + cvReleaseMat(&image_observations); + cvReleaseMat(&world_points); + + return ret; + } + /** \brief Update the \e pose using the items matching the given functor */ + template + bool UpdatePose(std::map& container, Pose* pose, F do_handle_test, + std::map* weights = 0) + { + int count_points = container.size(); + if (count_points < 4) + return false; // Note, UpdatePose calls CalcExteriorOrientation if 4 or 5 + // points + + CvMat* world_points = cvCreateMat(count_points, 1, CV_64FC3); + CvMat* image_observations = + cvCreateMat(count_points * 2, 1, CV_64F); 
// [u v u v u v ...]'
+    CvMat* w = 0;
+    if (weights)
+      w = cvCreateMat(count_points * 2, 1, CV_64F);
+
+    int ind = 0;
+    typename std::map<int, T>::iterator iter = container.begin();
+    typename std::map<int, T>::iterator iter_end = container.end();
+    for (; iter != iter_end; iter++)
+    {
+      T& f = iter->second;
+      if (!do_handle_test(f))
+        continue;
+      if (f.has_p2d && f.has_p3d)
+      {
+        world_points->data.db[ind * 3 + 0] = f.p3d.x;
+        world_points->data.db[ind * 3 + 1] = f.p3d.y;
+        world_points->data.db[ind * 3 + 2] = f.p3d.z;
+        image_observations->data.db[ind * 2 + 0] = f.p2d.x;
+        image_observations->data.db[ind * 2 + 1] = f.p2d.y;
+
+        if (weights)
+          w->data.db[ind * 2 + 1] = w->data.db[ind * 2 + 0] =
+              (*weights)[iter->first];
+        ind++;
+      }
+    }
+    if (ind < 4)
+      return false;  // Note, UpdatePose calls CalcExteriorOrientation if 4 or 5
+                     // points
+
+    CvRect r;
+    r.x = 0;
+    r.y = 0;
+    r.height = ind;
+    r.width = 1;
+    CvMat world_points_sub;
+    cvGetSubRect(world_points, &world_points_sub, r);
+
+    r.height = 2 * ind;
+    CvMat image_observations_sub;
+    cvGetSubRect(image_observations, &image_observations_sub, r);
+
+    bool ret;
+    if (weights)
+    {
+      CvMat w_sub;
+      cvGetSubRect(w, &w_sub, r);
+      ret =
+          UpdatePose(&world_points_sub, &image_observations_sub, pose, &w_sub);
+    }
+    else
+      ret = UpdatePose(&world_points_sub, &image_observations_sub, pose);
+
+    cvReleaseMat(&image_observations);
+    cvReleaseMat(&world_points);
+    if (w)
+      cvReleaseMat(&w);
+
+    return ret;
+  }
+
+  /** \brief Projects \e p3d in the items matching with \e type_id into \e
+   * projected_p2d . If \e type_id == -1 doesn't test the type. */
+  template <class T>
+  float Reproject(std::map<int, T>& container, Pose* pose, int type_id = -1,
+                  bool needs_has_p2d = false, bool needs_has_p3d = false,
+                  float average_outlier_limit = 0.f)
+  {
+    DoHandleTest<T> do_handle_test(type_id, needs_has_p2d, needs_has_p3d);
+    return Reproject(container, pose, do_handle_test, average_outlier_limit);
+  }
+  /** \brief Projects \e p3d in the items matching with the given functor */
+  template <class T, class F>
+  float Reproject(std::map<int, T>& container, Pose* pose, F do_handle_test,
+                  float average_outlier_limit = 0.f)
+  {
+    float reprojection_sum = 0.f;
+    int reprojection_count = 0;
+    float limit_sq = average_outlier_limit * average_outlier_limit;
+    typename std::map<int, T>::iterator iter = container.begin();
+    typename std::map<int, T>::iterator iter_end = container.end();
+    for (; iter != iter_end; iter++)
+    {
+      T& f = iter->second;
+      if (!do_handle_test(f))
+        continue;
+      Camera::ProjectPoint(iter->second.p3d, pose, iter->second.projected_p2d);
+      Camera::ProjectPoint(iter->second.p3d_sh, pose,
+                           iter->second.projected_p2d_sh);
+
+      // TODO: Now this is calculated in several places (should this distance
+      // be included in the ExternalContainer structure?)
+      float dist_sq =
+          (f.p2d.x - f.projected_p2d.x) * (f.p2d.x - f.projected_p2d.x);
+      dist_sq += (f.p2d.y - f.projected_p2d.y) * (f.p2d.y - f.projected_p2d.y);
+      if ((limit_sq == 0) || (dist_sq < limit_sq))
+      {
+        reprojection_sum += sqrt(dist_sq);
+        reprojection_count++;
+      }
+    }
+    if (reprojection_count == 0)
+      return 0.f;
+    return reprojection_sum / reprojection_count;
+  }
+
+  /** \brief Erases the items matching with \e type_id having large reprojection
+   * error. If \e type_id == -1 doesn't test the type. If \e pose is given calls
+   * \e Reproject() internally -- otherwise assumes it has been called
+   * beforehand.
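+   *
+   * A minimal sketch of the intended call sequence (illustrative only; the
+   * container, camera and pose are assumed to be set up elsewhere, and the
+   * container type must provide every member that \e Reproject touches):
+   * \code
+   * cam.Reproject(container, &pose);  // fills in projected_p2d
+   * cam.EraseUsingReprojectionError(container, 10.f);  // drop errors > 10 px
+   * \endcode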
+ */ + template + int EraseUsingReprojectionError(std::map& container, + float reprojection_error_limit, + int type_id = -1, Pose* pose = 0) + { + if (pose) + Reproject(container, pose, type_id, false, false); + DoEraseTest do_erase_test(reprojection_error_limit, false, false, + type_id); + return EraseItemsEC(container, do_erase_test); + } + + /** \brief Update pose rotations based on new observations */ + bool UpdateRotation(const CvMat* object_points, CvMat* image_points, + Pose* pose); + + /** \brief Update pose rotations (in rodriques&tra) based on new observations + */ + bool UpdateRotation(const CvMat* object_points, CvMat* image_points, + CvMat* rot, CvMat* tra); + + /** \brief Update existing pose based on new observations */ + bool UpdatePose(const CvMat* object_points, CvMat* image_points, Pose* pose, + CvMat* weights = 0); + + /** \brief Update existing pose (in rodriques&tra) based on new observations + */ + bool UpdatePose(const CvMat* object_points, CvMat* image_points, + CvMat* rodriques, CvMat* tra, CvMat* weights = 0); + + /** \brief Reconstruct 3D point using two camera poses and corresponding + * undistorted image feature positions */ + bool ReconstructFeature(const Pose* pose1, const Pose* pose2, + const CvPoint2D32f* u1, const CvPoint2D32f* u2, + CvPoint3D32f* p3d, double limit); + + /** \brief Get 3D-coordinate for 2D feature on the plane defined by the pose + * (z == 0) */ + void Get3dOnPlane(const Pose* pose, CvPoint2D32f p2d, CvPoint3D32f& p3d); + + /** \brief Get 3D-coordinate for 2D feature assuming certain depth */ + void Get3dOnDepth(const Pose* pose, CvPoint2D32f p2d, float depth, + CvPoint3D32f& p3d); }; -/** \brief Calculate the index used in external container map for specified \e marker_id */ -inline int MarkerIdToContainerId(int marker_id, int corner_id, int first_id=0, int last_id=65535) { - int id = first_id + marker_id*4 + corner_id; - if (id > last_id) return -1; - return id; +/** \brief Calculate the index used in external container map for specified \e + * marker_id */ +inline int MarkerIdToContainerId(int marker_id, int corner_id, int first_id = 0, + int last_id = 65535) +{ + int id = first_id + marker_id * 4 + corner_id; + if (id > last_id) + return -1; + return id; } /** \brief Version of \e MarkerDetector using external container */ -template -class MarkerDetectorEC : public MarkerDetector { +template +class MarkerDetectorEC : public MarkerDetector +{ protected: - template - bool PreDetect(std::pair &p, int type_id) { - return PreDetect(p.second, type_id); - } - template - bool PreDetect(T &p, int type_id) { - if (p.type_id != type_id) return false; - p.has_p2d = false; // This is updated again to true if tracking succeeds - return true; - } + template + bool PreDetect(std::pair& p, int type_id) + { + return PreDetect(p.second, type_id); + } + template + bool PreDetect(T& p, int type_id) + { + if (p.type_id != type_id) + return false; + p.has_p2d = false; // This is updated again to true if tracking succeeds + return true; + } + public: - int GetId(int marker_id, int corner_id, int first_id=0, int last_id=65535) { - return MarkerIdToContainerId(marker_id, corner_id, first_id, last_id); - } - - /** \brief Detect markers in the image and fill in their 2D-positions in the given external container */ - template - int Detect(IplImage *image, - Camera *cam, - std::map &container, - int type_id=0, - int first_id=0, - int last_id=65535, - bool track=false, - bool visualize=false, - double max_new_marker_error=0.08, - double max_track_error=0.2, - 
LabelingMethod labeling_method=CVSEQ) - { - int ret; - - // The existing markers in the container are marked to not have valid p2d (has_p2d=false) - typename std::map::iterator iter = container.begin(); - typename std::map::iterator iter_end = container.end(); - for (;iter != iter_end; iter++) { - T &f = iter->second; - if (f.type_id != type_id) continue; - f.has_p2d = false; // This is updated again to true if tracking succeeds - } - - // Detect without making the unnecessary pose update - ret = MarkerDetector::Detect(image, cam, track, visualize, max_new_marker_error, max_track_error, labeling_method, false); - - // Fill in the detected markers to the container - for (size_t i=0; i::markers->size(); i++) { - M &m = (*(MarkerDetector::markers))[i]; - for (int corner=0; corner<4; corner++) { - int id = MarkerIdToContainerId(m.GetId(), corner, first_id, last_id); - if (id != -1) { - T &f = container[id]; - f.type_id = type_id; - f.has_p2d = true; - f.p2d.x = float(m.marker_corners_img[corner].x); - f.p2d.y = float(m.marker_corners_img[corner].y); - } - } - } - return ret; - } - - /** \brief Fill in 3D-coordinates for \e marker_id marker corners using \e edge_length and \e pose */ - template - void MarkerToEC(std::map &container, int marker_id, double edge_length, Pose &pose, int type_id=0, int first_id=0,int last_id=65535) { - CvMat *m3 = cvCreateMat(4,4,CV_64F); cvSetIdentity(m3); - pose.GetMatrix(m3); - - for(size_t corner = 0; corner < 4; ++corner) - { - // TODO: This should be exactly the same as in Marker class. - // Should we get the values from there somehow? - double X_data[4] = {0, 0, 0, 1}; - if (corner == 0) { - X_data[0] = -0.5*edge_length; - X_data[1] = -0.5*edge_length; - } else if (corner == 1) { - X_data[0] = +0.5*edge_length; - X_data[1] = -0.5*edge_length; - } else if (corner == 2) { - X_data[0] = +0.5*edge_length; - X_data[1] = +0.5*edge_length; - } else if (corner == 3) { - X_data[0] = -0.5*edge_length; - X_data[1] = +0.5*edge_length; - } - - CvMat X = cvMat(4, 1, CV_64F, X_data); - cvMatMul(m3, &X, &X); - - int id = MarkerIdToContainerId(marker_id, corner, first_id, last_id); - T &f = container[id]; - f.type_id = type_id; - f.p3d.x = float(X_data[0] / X_data[3]); - f.p3d.y = float(X_data[1] / X_data[3]); - f.p3d.z = float(X_data[2] / X_data[3]); - f.has_p3d = true; - } - cvReleaseMat(&m3); - } + int GetId(int marker_id, int corner_id, int first_id = 0, int last_id = 65535) + { + return MarkerIdToContainerId(marker_id, corner_id, first_id, last_id); + } + + /** \brief Detect markers in the image and fill in their 2D-positions in the + * given external container */ + template + int Detect(IplImage* image, Camera* cam, std::map& container, + int type_id = 0, int first_id = 0, int last_id = 65535, + bool track = false, bool visualize = false, + double max_new_marker_error = 0.08, double max_track_error = 0.2, + LabelingMethod labeling_method = CVSEQ) + { + int ret; + + // The existing markers in the container are marked to not have valid p2d + // (has_p2d=false) + typename std::map::iterator iter = container.begin(); + typename std::map::iterator iter_end = container.end(); + for (; iter != iter_end; iter++) + { + T& f = iter->second; + if (f.type_id != type_id) + continue; + f.has_p2d = false; // This is updated again to true if tracking succeeds + } + + // Detect without making the unnecessary pose update + ret = MarkerDetector::Detect(image, cam, track, visualize, + max_new_marker_error, max_track_error, + labeling_method, false); + + // Fill in the detected markers to 
the container + for (size_t i = 0; i < MarkerDetector::markers->size(); i++) + { + M& m = (*(MarkerDetector::markers))[i]; + for (int corner = 0; corner < 4; corner++) + { + int id = MarkerIdToContainerId(m.GetId(), corner, first_id, last_id); + if (id != -1) + { + T& f = container[id]; + f.type_id = type_id; + f.has_p2d = true; + f.p2d.x = float(m.marker_corners_img[corner].x); + f.p2d.y = float(m.marker_corners_img[corner].y); + } + } + } + return ret; + } + + /** \brief Fill in 3D-coordinates for \e marker_id marker corners using \e + * edge_length and \e pose */ + template + void MarkerToEC(std::map& container, int marker_id, + double edge_length, Pose& pose, int type_id = 0, + int first_id = 0, int last_id = 65535) + { + CvMat* m3 = cvCreateMat(4, 4, CV_64F); + cvSetIdentity(m3); + pose.GetMatrix(m3); + + for (size_t corner = 0; corner < 4; ++corner) + { + // TODO: This should be exactly the same as in Marker class. + // Should we get the values from there somehow? + double X_data[4] = { 0, 0, 0, 1 }; + if (corner == 0) + { + X_data[0] = -0.5 * edge_length; + X_data[1] = -0.5 * edge_length; + } + else if (corner == 1) + { + X_data[0] = +0.5 * edge_length; + X_data[1] = -0.5 * edge_length; + } + else if (corner == 2) + { + X_data[0] = +0.5 * edge_length; + X_data[1] = +0.5 * edge_length; + } + else if (corner == 3) + { + X_data[0] = -0.5 * edge_length; + X_data[1] = +0.5 * edge_length; + } + + CvMat X = cvMat(4, 1, CV_64F, X_data); + cvMatMul(m3, &X, &X); + + int id = MarkerIdToContainerId(marker_id, corner, first_id, last_id); + T& f = container[id]; + f.type_id = type_id; + f.p3d.x = float(X_data[0] / X_data[3]); + f.p3d.y = float(X_data[1] / X_data[3]); + f.p3d.z = float(X_data[2] / X_data[3]); + f.has_p3d = true; + } + cvReleaseMat(&m3); + } }; /** \brief Version of \e MultiMarker using external container */ -class MultiMarkerEC : public MultiMarker { +class MultiMarkerEC : public MultiMarker +{ public: - /** \brief Fill in 3D-coordinates for marker corners to the given container */ - template - bool MarkersToEC(std::map &container, int type_id=0, int first_id=0,int last_id=65535) { - bool ret = false; - for (size_t i = 0; i < marker_indices.size(); i++) { - if (marker_status[i] == 0) continue; // Skip the ones not in point cloud - int marker_id = marker_indices[i]; - for (int corner = 0; corner < 4; corner++) { - int id = MarkerIdToContainerId(marker_id, corner, first_id, last_id); - if (id != -1) { - int pc_index = pointcloud_index(marker_id, corner); - T &f = container[id]; - f.type_id = type_id; - f.p3d.x = float(pointcloud[pc_index].x); - f.p3d.y = float(pointcloud[pc_index].y); - f.p3d.z = float(pointcloud[pc_index].z); - f.has_p3d = true; - ret = true; - } - } - } - return ret; - } - - template - bool MarkersFromEC(std::map &container, int type_id=0, int first_id=0,int last_id=65535) { - // TODO... 
-        return false;
-    }
-
-    template <class T>
-    bool Save(std::map<int,T> &container, const char* fname, FILE_FORMAT format = FILE_FORMAT_DEFAULT, int type_id=0, int first_id=0,int last_id=65535) {
-        if (!MarkersFromEC(container, type_id, first_id, last_id)) return false;
-        if (!MultiMarker::Save(fname, format)) return false;
-        return true;
-    }
-
-    /** \brief Fill in 3D-coordinates for marker corners to the given container using save \e MultiMarker file */
-    template <class T>
-    bool Load(std::map<int,T> &container, const char* fname, FILE_FORMAT format = FILE_FORMAT_DEFAULT, int type_id=0, int first_id=0,int last_id=65535) {
-        if (!MultiMarker::Load(fname, format)) return false;
-        return MarkersToEC(container, type_id, first_id, last_id);
-    }
+  /** \brief Fill in 3D-coordinates for marker corners to the given container */
+  template <class T>
+  bool MarkersToEC(std::map<int, T>& container, int type_id = 0,
+                   int first_id = 0, int last_id = 65535)
+  {
+    bool ret = false;
+    for (size_t i = 0; i < marker_indices.size(); i++)
+    {
+      if (marker_status[i] == 0)
+        continue;  // Skip the ones not in point cloud
+      int marker_id = marker_indices[i];
+      for (int corner = 0; corner < 4; corner++)
+      {
+        int id = MarkerIdToContainerId(marker_id, corner, first_id, last_id);
+        if (id != -1)
+        {
+          int pc_index = pointcloud_index(marker_id, corner);
+          T& f = container[id];
+          f.type_id = type_id;
+          f.p3d.x = float(pointcloud[pc_index].x);
+          f.p3d.y = float(pointcloud[pc_index].y);
+          f.p3d.z = float(pointcloud[pc_index].z);
+          f.has_p3d = true;
+          ret = true;
+        }
+      }
+    }
+    return ret;
+  }
+
+  template <class T>
+  bool MarkersFromEC(std::map<int, T>& container, int type_id = 0,
+                     int first_id = 0, int last_id = 65535)
+  {
+    // TODO...
+    return false;
+  }
+
+  template <class T>
+  bool Save(std::map<int, T>& container, const char* fname,
+            FILE_FORMAT format = FILE_FORMAT_DEFAULT, int type_id = 0,
+            int first_id = 0, int last_id = 65535)
+  {
+    if (!MarkersFromEC(container, type_id, first_id, last_id))
+      return false;
+    if (!MultiMarker::Save(fname, format))
+      return false;
+    return true;
+  }
+
+  /** \brief Fill in 3D-coordinates for marker corners to the given container
+   * using a saved \e MultiMarker file */
+  template <class T>
+  bool Load(std::map<int, T>& container, const char* fname,
+            FILE_FORMAT format = FILE_FORMAT_DEFAULT, int type_id = 0,
+            int first_id = 0, int last_id = 65535)
+  {
+    if (!MultiMarker::Load(fname, format))
+      return false;
+    return MarkersToEC(container, type_id, first_id, last_id);
+  }
 };

-}
+}  // namespace alvar

 #endif
-
diff --git a/ar_track_alvar/include/ar_track_alvar/FernImageDetector.h b/ar_track_alvar/include/ar_track_alvar/FernImageDetector.h
index 798f839..954bfa8 100644
--- a/ar_track_alvar/include/ar_track_alvar/FernImageDetector.h
+++ b/ar_track_alvar/include/ar_track_alvar/FernImageDetector.h
@@ -44,31 +44,29 @@

 using namespace cv;

-namespace alvar{
-
+namespace alvar
+{
 /**
 * \brief FernClassifier subclass that implements binary reading and writing.
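+ *
+ * A minimal usage sketch (illustrative only; "classifier.dat" is a
+ * hypothetical file name):
+ * \code
+ * FernClassifierWrapper fern;
+ * std::fstream stream("classifier.dat", std::ios::in | std::ios::binary);
+ * fern.readBinary(stream);
+ * \endcode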
*/ class FernClassifierWrapper : public FernClassifier { public: - FernClassifierWrapper(); - FernClassifierWrapper(const FileNode &fileNode); - FernClassifierWrapper(const vector > &points, - const vector &referenceImages, - const vector > &labels = vector >(), - int _nclasses = 0, - int _patchSize = PATCH_SIZE, - int _signatureSize = DEFAULT_SIGNATURE_SIZE, - int _nstructs = DEFAULT_STRUCTS, - int _structSize = DEFAULT_STRUCT_SIZE, - int _nviews = DEFAULT_VIEWS, - int _compressionMethod = COMPRESSION_NONE, - const PatchGenerator &patchGenerator = PatchGenerator()); - virtual ~FernClassifierWrapper(); - - virtual void readBinary(std::fstream &stream); - virtual void writeBinary(std::fstream &stream) const; + FernClassifierWrapper(); + FernClassifierWrapper(const FileNode& fileNode); + FernClassifierWrapper( + const vector >& points, + const vector& referenceImages, + const vector >& labels = vector >(), + int _nclasses = 0, int _patchSize = PATCH_SIZE, + int _signatureSize = DEFAULT_SIGNATURE_SIZE, + int _nstructs = DEFAULT_STRUCTS, int _structSize = DEFAULT_STRUCT_SIZE, + int _nviews = DEFAULT_VIEWS, int _compressionMethod = COMPRESSION_NONE, + const PatchGenerator& patchGenerator = PatchGenerator()); + virtual ~FernClassifierWrapper(); + + virtual void readBinary(std::fstream& stream); + virtual void writeBinary(std::fstream& stream) const; }; /** @@ -76,41 +74,41 @@ class FernClassifierWrapper : public FernClassifier */ class ALVAR_EXPORT FernImageDetector { -public: - FernImageDetector(const bool visualize = false); - ~FernImageDetector(); - - void imagePoints(vector &points); - void modelPoints(vector &points, bool normalize = true); - - cv::Size size(); - cv::Mat homography(); - double inlierRatio(); - - void train(const std::string &filename); - void train(Mat &image); - void findFeatures(Mat &image, bool planeAssumption = true); - - bool read(const std::string &filename, const bool binary = true); - bool write(const std::string &filename, const bool binary = true); +public: + FernImageDetector(const bool visualize = false); + ~FernImageDetector(); + + void imagePoints(vector& points); + void modelPoints(vector& points, bool normalize = true); + + cv::Size size(); + cv::Mat homography(); + double inlierRatio(); + + void train(const std::string& filename); + void train(Mat& image); + void findFeatures(Mat& image, bool planeAssumption = true); + + bool read(const std::string& filename, const bool binary = true); + bool write(const std::string& filename, const bool binary = true); private: - PatchGenerator mPatchGenerator; - LDetector mLDetector; - std::vector mClassifier; - - vector mKeyPoints; - vector mImagePoints; - vector mModelPoints; - - bool mVisualize; - std::vector mObjects; - cv::Size mSize; - cv::Mat mCorrespondences; - cv::Mat mHomography; - double mInlierRatio; + PatchGenerator mPatchGenerator; + LDetector mLDetector; + std::vector mClassifier; + + vector mKeyPoints; + vector mImagePoints; + vector mModelPoints; + + bool mVisualize; + std::vector mObjects; + cv::Size mSize; + cv::Mat mCorrespondences; + cv::Mat mHomography; + double mInlierRatio; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/FernPoseEstimator.h b/ar_track_alvar/include/ar_track_alvar/FernPoseEstimator.h index 6c7ae37..bc40f01 100644 --- a/ar_track_alvar/include/ar_track_alvar/FernPoseEstimator.h +++ b/ar_track_alvar/include/ar_track_alvar/FernPoseEstimator.h @@ -42,36 +42,35 @@ namespace alvar { - /** * \brief Pose estimation class for 
FernImageDetector. */ class ALVAR_EXPORT FernPoseEstimator { - public: - FernPoseEstimator(); - ~FernPoseEstimator(); + FernPoseEstimator(); + ~FernPoseEstimator(); + + Pose pose() const; + Camera camera() const; - Pose pose() const; - Camera camera() const; + bool setCalibration(const std::string& filename, int width, int height); + void setResolution(int width, int height); - bool setCalibration(const std::string &filename, int width, int height); - void setResolution(int width, int height); - - typedef std::vector ImagePointVector; - typedef std::vector ModelPointVector; - typedef std::map ExternalContainerMap; - void calculateFromPointCorrespondences(ModelPointVector &mpts, ImagePointVector &ipts); - void updateFromTrackedPoints(ExternalContainerMap &container); - void extractPlaneCoordinates(ExternalContainerMap &container); + typedef std::vector ImagePointVector; + typedef std::vector ModelPointVector; + typedef std::map ExternalContainerMap; + void calculateFromPointCorrespondences(ModelPointVector& mpts, + ImagePointVector& ipts); + void updateFromTrackedPoints(ExternalContainerMap& container); + void extractPlaneCoordinates(ExternalContainerMap& container); private: - Pose mPose; - Camera mCamera; - CameraEC mCameraEC; + Pose mPose; + Camera mCamera; + CameraEC mCameraEC; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/FileFormat.h b/ar_track_alvar/include/ar_track_alvar/FileFormat.h index eacfb5f..9ba0165 100644 --- a/ar_track_alvar/include/ar_track_alvar/FileFormat.h +++ b/ar_track_alvar/include/ar_track_alvar/FileFormat.h @@ -30,42 +30,43 @@ * \brief This file defines various file formats. */ -namespace alvar { - - /** - * File format enumeration used when reading / writing configuration - * files. - */ - typedef enum { - /** - * \brief Default file format. - * - * Format is either OPENCV, TEXT or XML depending on load/store function used. - */ - FILE_FORMAT_DEFAULT, +namespace alvar +{ +/** + * File format enumeration used when reading / writing configuration + * files. + */ +typedef enum +{ + /** + * \brief Default file format. + * + * Format is either OPENCV, TEXT or XML depending on load/store function used. + */ + FILE_FORMAT_DEFAULT, - /** - * \brief File format written with cvWrite. - * - * File contents depend on the specific load/store function used. - */ - FILE_FORMAT_OPENCV, + /** + * \brief File format written with cvWrite. + * + * File contents depend on the specific load/store function used. + */ + FILE_FORMAT_OPENCV, - /** - * \brief Plain ASCII text file format. - * - * File contents depend on the specific load/store function used. - */ - FILE_FORMAT_TEXT, + /** + * \brief Plain ASCII text file format. + * + * File contents depend on the specific load/store function used. + */ + FILE_FORMAT_TEXT, - /** - * \brief XML file format. - * - * XML schema depends on the specific load/store function used. - */ - FILE_FORMAT_XML + /** + * \brief XML file format. + * + * XML schema depends on the specific load/store function used. 
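+   *
+   * For example (illustrative only; assumes a store call such as
+   * \e Camera::SaveCalib that accepts a \e FILE_FORMAT argument):
+   * \code
+   * camera.SaveCalib("calib.xml", alvar::FILE_FORMAT_XML);
+   * \endcode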
+ */ + FILE_FORMAT_XML - } FILE_FORMAT; -} +} FILE_FORMAT; +} // namespace alvar -#endif //FILEFORMAT_H +#endif // FILEFORMAT_H diff --git a/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h b/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h index 95c9b52..6d1f402 100644 --- a/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h +++ b/ar_track_alvar/include/ar_track_alvar/FileFormatUtils.h @@ -32,54 +32,54 @@ */ #include "Alvar.h" -#include -#include -#include -#include +#include +#include "tinyxml.h" -namespace alvar { - - /** \brief Utility functions for file reading / writing. - */ - class ALVAR_EXPORT FileFormatUtils { - private: - /** - * \brief Reads matrix type, rows and cols from XML element. - * \return true if XML element appears to be valid; otherwise false. - */ - static bool decodeXMLMatrix(const tinyxml2::XMLElement *xml_matrix, int &type, int &rows, int &cols); - - public: +namespace alvar +{ +/** \brief Utility functions for file reading / writing. + */ +class ALVAR_EXPORT FileFormatUtils +{ +private: + /** + * \brief Reads matrix type, rows and cols from XML element. + * \return true if XML element appears to be valid; otherwise false. + */ + static bool decodeXMLMatrix(const TiXmlElement* xml_matrix, int& type, + int& rows, int& cols); - /** \brief Allocates CvMat of a correct type and size. - * \param xml_matrix alvar:matrix element. - * \return CvMat that has the correct size for \e parseXMLMatrix. - */ - static CvMat* allocateXMLMatrix(const tinyxml2::XMLElement *xml_matrix); +public: + /** \brief Allocates cv::Mat of a correct type and size. + * \param xml_matrix alvar:matrix element. + * \return cv::Mat that has the correct size for \e parseXMLMatrix. + */ + static cv::Mat* allocateXMLMatrix(const TiXmlElement* xml_matrix); - /** \brief Reads contents of alvar:matrix into CvMat. - * - * Parsing fails if the matrix is not the same type or does not have - * the same number of rows and columns as the XML element. - * - * \param xml_matrix alvar:matrix element. If NULL no parsing is done and - * false is returned. - * \param matrix CvMat that has the correct size, populated with data in - * the xml_matrix. - * \return true if matrix was successfully parsed; otherwise false. - */ - static bool parseXMLMatrix(const tinyxml2::XMLElement *xml_matrix, CvMat *matrix); + /** \brief Reads contents of alvar:matrix into cv::Mat. + * + * Parsing fails if the matrix is not the same type or does not have + * the same number of rows and columns as the XML element. + * + * \param xml_matrix alvar:matrix element. If NULL no parsing is done and + * false is returned. + * \param matrix cv::Mat that has the correct size, populated with data in + * the xml_matrix. + * \return true if matrix was successfully parsed; otherwise false. + */ + static bool parseXMLMatrix(const TiXmlElement* xml_matrix, cv::Mat& matrix); - /** \brief Allocates new XML element and populates it with a CvMat data. - * - * The returned element needs to be deallocated by the caller. - * - * \param element_name Name of the allocated tiXmlElement. - * \param matrix Data that is written into the returned XML element. - * \return Newly allocated TiXmlElement. - */ - static tinyxml2::XMLElement* createXMLMatrix(const char* element_name, const CvMat *matrix); - }; -} + /** \brief Allocates new XML element and populates it with a cv::Mat data. + * + * The returned element needs to be deallocated by the caller. + * + * \param element_name Name of the allocated tiXmlElement. 
+ * \param matrix Data that is written into the returned XML element. + * \return Newly allocated TiXmlElement. + */ + static TiXmlElement* createXMLMatrix(const char* element_name, + const cv::Mat& matrix); +}; +} // namespace alvar -#endif //FILEFORMATUTILS_H +#endif // FILEFORMATUTILS_H diff --git a/ar_track_alvar/include/ar_track_alvar/Filter.h b/ar_track_alvar/include/ar_track_alvar/Filter.h index 52943c6..d63f565 100644 --- a/ar_track_alvar/include/ar_track_alvar/Filter.h +++ b/ar_track_alvar/include/ar_track_alvar/Filter.h @@ -36,10 +36,11 @@ #include #include -namespace alvar { - +namespace alvar +{ /** - * \brief \e Filter is pure virtual class describing the basic virtual interface for all filters + * \brief \e Filter is pure virtual class describing the basic virtual interface + * for all filters * * Basic usage: * \code @@ -62,34 +63,39 @@ namespace alvar { * cout<<(d = 2.9)< buffer; - void push_to_buffer(double y); + unsigned int count; + unsigned int window_size; + std::deque buffer; + void push_to_buffer(double y); + public: - FilterAverage(int size=3) { setWindowSize(size); } - void setWindowSize(int size) { window_size=size; count=0; } - int getWindowSize() { return window_size; } - int getCurrentSize() { return (int) buffer.size(); } - double operator= (double _value) { return next(_value); } - virtual double next(double y); - virtual void reset(); - double deviation() const; + FilterAverage(int size = 3) + { + setWindowSize(size); + } + void setWindowSize(int size) + { + window_size = size; + count = 0; + } + int getWindowSize() + { + return window_size; + } + int getCurrentSize() + { + return (int)buffer.size(); + } + double operator=(double _value) + { + return next(_value); + } + virtual double next(double y); + virtual void reset(); + double deviation() const; }; /** @@ -125,21 +149,30 @@ class ALVAR_EXPORT FilterAverage : public Filter { * * The \e FilterMedian remembers \e window_size last elements * in the time series and returns always the middle element - * after sorting ((\e window_size / 2) + 1) elements. The size of the window - * \e window_size can be set in the constructor or with + * after sorting ((\e window_size / 2) + 1) elements. The size of the window + * \e window_size can be set in the constructor or with * \e setWindowSize() . * */ -class ALVAR_EXPORT FilterMedian : public FilterAverage { - std::vector sort_buffer; +class ALVAR_EXPORT FilterMedian : public FilterAverage +{ + std::vector sort_buffer; + public: - FilterMedian(int size=3) { setWindowSize(size); } - void setWindowSize(int size) { - FilterAverage::setWindowSize(size); - sort_buffer.resize(size); - } - double operator= (double _value) { return next(_value); } - virtual double next(double y); + FilterMedian(int size = 3) + { + setWindowSize(size); + } + void setWindowSize(int size) + { + FilterAverage::setWindowSize(size); + sort_buffer.resize(size); + } + double operator=(double _value) + { + return next(_value); + } + virtual double next(double y); }; /** @@ -147,55 +180,85 @@ class ALVAR_EXPORT FilterMedian : public FilterAverage { * \note This could be named also as FilterSingleExponentialSmoothing * * The \e FilterRunningAverage calculates a simple running average - * using the weight value \e alpha. + * using the weight value \e alpha. * \code * value = ((1.0-alpha) * value) + (alpha * (double)y); * \endcode - * If alpha is larger (near 1.0) the average reacts faster for - * changes and if it is near 0.0 then it reacts slowly. 
The - * weight value \e alpha may be set in the constructor or + * If alpha is larger (near 1.0) the average reacts faster for + * changes and if it is near 0.0 then it reacts slowly. The + * weight value \e alpha may be set in the constructor or + * with \e setAlpha() . */ -class ALVAR_EXPORT FilterRunningAverage : public Filter { +class ALVAR_EXPORT FilterRunningAverage : public Filter +{ protected: - double alpha; - bool breset; + double alpha; + bool breset; + public: - FilterRunningAverage(double _alpha=0.5) { breset=true; setAlpha(_alpha); } - void setAlpha(double _alpha) { alpha=std::max(std::min(_alpha,1.0),0.0); } - double getAlpha() { return alpha; } - double operator= (double _value) { return next(_value); } - virtual double next(double y); - virtual void reset(); + FilterRunningAverage(double _alpha = 0.5) + { + breset = true; + setAlpha(_alpha); + } + void setAlpha(double _alpha) + { + alpha = std::max(std::min(_alpha, 1.0), 0.0); + } + double getAlpha() + { + return alpha; + } + double operator=(double _value) + { + return next(_value); + } + virtual double next(double y); + virtual void reset(); }; /** - * \brief \e FilterDoubleExponentialSmoothing provides an weighted running average filter + * \brief \e FilterDoubleExponentialSmoothing provides a weighted running + * average filter * * The \e FilterDoubleExponentialSmoothing calculates a simple running average * for both the \e average and \e slope using the weight values \e alpha and - * \e gamma. + * \e gamma. * \code * value = ((1.0-alpha) * (value + slope)) + (alpha * (double)y); * slope = ((1.0-gamma) * (slope)) + (gamma * (value - value_prev)); * \endcode - * If the weight values (\e alpha , \e gamma) are larger (near 1.0) - * the formulas react faster for changes and if they are near 0.0 + * If the weight values (\e alpha , \e gamma) are larger (near 1.0) + * the formulas react faster for changes and if they are near 0.0 * then the reaction is slower. The weight values \e alpha and \e gamma * may be set in the constructor or with \e setAlpha() and \e setGamma() .
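+ *
+ * A minimal usage sketch (the sample values are illustrative only):
+ * \code
+ * FilterDoubleExponentialSmoothing f(0.2, 0.1); // alpha, gamma
+ * double y[5] = { 1.0, 1.2, 0.9, 1.4, 1.1 };
+ * for (int i = 0; i < 5; i++) {
+ *   cout << f.next(y[i]) << endl; // smoothed estimate of level and slope
+ * }
+ * \endcode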
*/ -class ALVAR_EXPORT FilterDoubleExponentialSmoothing : public FilterRunningAverage { +class ALVAR_EXPORT FilterDoubleExponentialSmoothing + : public FilterRunningAverage +{ protected: - double gamma; - double slope; + double gamma; + double slope; + public: - FilterDoubleExponentialSmoothing(double _alpha=0.5, double _gamma=1.0) : FilterRunningAverage(_alpha) { - setGamma(_gamma); - } - void setGamma(double _gamma) { gamma=std::max(std::min(_gamma,1.0),0.0); } - double getGamma() { return gamma; } - double operator= (double _value) { return next(_value); } - virtual double next(double y); + FilterDoubleExponentialSmoothing(double _alpha = 0.5, double _gamma = 1.0) + : FilterRunningAverage(_alpha) + { + setGamma(_gamma); + } + void setGamma(double _gamma) + { + gamma = std::max(std::min(_gamma, 1.0), 0.0); + } + double getGamma() + { + return gamma; + } + double operator=(double _value) + { + return next(_value); + } + virtual double next(double y); }; /** @@ -203,37 +266,47 @@ class ALVAR_EXPORT FilterDoubleExponentialSmoothing : public FilterRunningAverag * */ template -class ALVAR_EXPORT FilterArray { +class ALVAR_EXPORT FilterArray +{ protected: - double *tmp; - std::vector arr; + double* tmp; + std::vector arr; + public: - FilterArray(int size) { - tmp = NULL; - SetSize(size); - } - ~FilterArray() { - delete [] tmp; - } - size_t GetSize() { - return arr.size(); - } - void SetSize(size_t size) { - if (tmp) delete [] tmp; - tmp = new double[size]; - arr.resize(size); - } - F &operator[](size_t i) { - return arr[i]; - } - const double *as_double_array(size_t start_i=0) { - for (size_t i=0; i #ifdef WIN32 - #include - #include - #include - #include +#include +#include +#include +#include #else - #include - #include - #include +#include +#include +#include #endif class Drawable { public: - Drawable(double _scale=1, double _r=1, double _g=1, double _b=1); + Drawable(double _scale = 1, double _r = 1, double _g = 1, double _b = 1); - void SetScale(double _scale); - void SetColor(double _r=1, double _g=1, double _b=1); - - virtual void Draw(); - virtual void DrawAxis(double scale, double color[3]); - - void SetGLMatTraQuat(double *tra, double *quat, bool flip=false); - void SetGLMatTraRod(double *tra, double *rod); + void SetScale(double _scale); + void SetColor(double _r = 1, double _g = 1, double _b = 1); - double scale; - double color[3]; - double gl_mat[16]; + virtual void Draw(); + virtual void DrawAxis(double scale, double color[3]); + + void SetGLMatTraQuat(double* tra, double* quat, bool flip = false); + void SetGLMatTraRod(double* tra, double* rod); + + double scale; + double color[3]; + double gl_mat[16]; protected: - double ax_len; + double ax_len; }; namespace GlutViewer { - void Draw(); - void Exit(); - void Start(int argc, char** argv, int w, int h, float r=300.0); - void Reshape(int w, int h); +void Draw(); +void Exit(); +void Start(int argc, char** argv, int w, int h, float r = 300.0); +void Reshape(int w, int h); - void DrawableClear(); - void DrawableAdd(Drawable* item); +void DrawableClear(); +void DrawableAdd(Drawable* item); - void DrawVr(); - void DrawAr(); - void DrawFloor(); - void DrawContent(); - void DrawAxis(float scale); - void Mouse(int button, int state, int x, int y); - void Motion(int x, int y); - void SetGlProjectionMatrix(double p[16]); - void SetGlModelviewMatrix(double p[16]); - void KeyCallback(int key, int x, int y); +void DrawVr(); +void DrawAr(); +void DrawFloor(); +void DrawContent(); +void DrawAxis(float scale); +void Mouse(int button, int state, 
int x, int y); +void Motion(int x, int y); +void SetGlProjectionMatrix(double p[16]); +void SetGlModelviewMatrix(double p[16]); +void KeyCallback(int key, int x, int y); - void SetVideo(const IplImage* _image); - void DrawVideo(); +void SetVideo(const IplImage* _image); +void DrawVideo(); - double GetXOffset(); - double GetYOffset(); -} +double GetXOffset(); +double GetYOffset(); +} // namespace GlutViewer #endif diff --git a/ar_track_alvar/include/ar_track_alvar/IntegralImage.h b/ar_track_alvar/include/ar_track_alvar/IntegralImage.h index 4d32262..e73c065 100644 --- a/ar_track_alvar/include/ar_track_alvar/IntegralImage.h +++ b/ar_track_alvar/include/ar_track_alvar/IntegralImage.h @@ -35,13 +35,14 @@ #include #include -namespace alvar { - -/** \brief Class for calculating "evenly spaced" integer indices for data sequence +namespace alvar +{ +/** \brief Class for calculating "evenly spaced" integer indices for data + * sequence * - * If we have a data sequence we want to step through in certain amount of steps, - * \e IntIndex can be used for iterating through the steps using fast integer implementation. - * This class is related to stride iterators. + * If we have a data sequence we want to step through in certain amount of + * steps, \e IntIndex can be used for iterating through the steps using fast + * integer implementation. This class is related to stride iterators. * * \section Usage * \code @@ -52,116 +53,130 @@ namespace alvar { * } * \endcode */ -class ALVAR_EXPORT IntIndex { +class ALVAR_EXPORT IntIndex +{ protected: - int index; - int step; - int step_remainder; - int estep; - int next_step; - int res; - int steps; - void update_next_step(); + int index; + int step; + int step_remainder; + int estep; + int next_step; + int res; + int steps; + void update_next_step(); + public: - /** \brief Create \e IntIndex for indexing \e _res elements in predefined amount of \e _steps . - * \param _res The number of elements in the data sequence we want to index. - * \param _steps The number of steps to use to cover the \e _res elements (\e _steps < \e _res) - */ - IntIndex(int _res, int _steps); - /** \brief Set the integer index to the "grid" value nearest to \e v . - */ - int operator=(int v); - /** \brief Take the next integer index step. */ - int next(); - /** \brief Get the index value */ - int get() const; - /** \brief How much the index will be increased with the next \e next() */ - int get_next_step() const; - /** \brief For testing have we reached the end. */ - int end() const; + /** \brief Create \e IntIndex for indexing \e _res elements in predefined + * amount of \e _steps . \param _res The number of elements in the data + * sequence we want to index. \param _steps The number of steps to use to + * cover the \e _res elements (\e _steps < \e _res) + */ + IntIndex(int _res, int _steps); + /** \brief Set the integer index to the "grid" value nearest to \e v . + */ + int operator=(int v); + /** \brief Take the next integer index step. */ + int next(); + /** \brief Get the index value */ + int get() const; + /** \brief How much the index will be increased with the next \e next() */ + int get_next_step() const; + /** \brief For testing have we reached the end. */ + int end() const; }; - -/** \brief \e IntegralImage is used for calculating rectangular image sums and averages rapidly +/** \brief \e IntegralImage is used for calculating rectangular image sums and + * averages rapidly * - * The integral images are based on making intermediate representation of the image. 
Using this - * approach the sum/average of rectangular area can be calculated using only four references for - * the integral image. The integral images are commonly used with HAAR-like features. + * The integral images are based on making an intermediate representation of the + * image. Using this approach the sum/average of rectangular area can be + * calculated using only four references for the integral image. The integral + * images are commonly used with HAAR-like features. * - * The \e IntegralImage should be used when we need to a lot of sum/average calculations for - * the same image. - * - * \section References - * - Konstantinos G. Derpanis (2007). Integral image-based representations. Department of Computer - * Science and Engineering, York University. - * - Viola, P. & Jones, M. (2001). Rapid object detection using a boosted cascade of simple - * features. In IEEE Computer Vision and Pattern Recognition (pp. I:511-518). + * The \e IntegralImage should be used when we need to do a lot of sum/average + * calculations for the same image. + * + * \section References + * - Konstantinos G. Derpanis (2007). Integral image-based representations. + * Department of Computer Science and Engineering, York University. + * - Viola, P. & Jones, M. (2001). Rapid object detection using a boosted + * cascade of simple features. In IEEE Computer Vision and Pattern Recognition + * (pp. I:511-518). */ -class ALVAR_EXPORT IntegralImage { - IplImage *sum; +class ALVAR_EXPORT IntegralImage +{ + IplImage* sum; + public: - IntegralImage(); - ~IntegralImage(); - /** \brief Update integral image for the given image. - * \param gray The original grayscale image we want analyze - */ - void Update(IplImage *gray); - /** \brief Calculate the sum for the given rectangular area in the image. - * \param rect The rectancle - * \param count If this parameter is not 0 it is filled with number of pixels in the rectangle. - */ - double GetSum(CvRect &rect, int *count=0); - /** \brief Calculate the average for the given rectangular area in the image. */ - double GetAve(CvRect &rect); - /** \brief Get a sub-image using integral image representation. - * \param rect The rectangle we want to get the sub-image from - * \param sub The image where the sub-image is generated. Note, the desired resolution is defined by \e sub. - * - * Get an image \e sub with a predefined resolution from the given - * rectangular area \e rect . In practice the \e sub is filled by - * getting the average with \e GetAve() for every pixel area. - */ - void GetSubimage(const CvRect &rect, IplImage *sub); + IntegralImage(); + ~IntegralImage(); + /** \brief Update integral image for the given image. + * \param gray The original grayscale image we want to analyze + */ + void Update(IplImage* gray); + /** \brief Calculate the sum for the given rectangular area in the image. + * \param rect The rectangle + * \param count If this parameter is not 0 it is filled with number of pixels + * in the rectangle. + */ + double GetSum(CvRect& rect, int* count = 0); + /** \brief Calculate the average for the given rectangular area in the image. + */ + double GetAve(CvRect& rect); + /** \brief Get a sub-image using integral image representation. + * \param rect The rectangle we want to get the sub-image from + * \param sub The image where the sub-image is generated. Note, the desired + * resolution is defined by \e sub. + * + * Get an image \e sub with a predefined resolution from the given + * rectangular area \e rect . 
In practice the \e sub is filled by + * getting the average with \e GetAve() for every pixel area. + */ + void GetSubimage(const CvRect& rect, IplImage* sub); }; -/** \brief \e IntegralGradient is used for calculating rectangular image gradients rapidly +/** \brief \e IntegralGradient is used for calculating rectangular image + * gradients rapidly * - * We calculate \e IntegralImage:s based on point normals for 4-pixel intersections - * (see Donahue1992). Using the integral images it is possible to make fast gradient - * calculations to any image rectangle. This approach is useful when we need to calculate many - * gradient rectangles for the same image. + * We calculate \e IntegralImage:s based on point normals for 4-pixel + * intersections (see Donahue1992). Using the integral images it is possible to + * make fast gradient calculations to any image rectangle. This approach is + * useful when we need to calculate many gradient rectangles for the same image. * * (See \e SampleIntegralImage.cpp) */ -class ALVAR_EXPORT IntegralGradient { +class ALVAR_EXPORT IntegralGradient +{ protected: - IplImage *normalx; - IplImage *normaly; - IntegralImage integx; - IntegralImage integy; - // Calculate point normals for 4-pixel intersection - // as described in Donahue1992 - void CalculatePointNormals(IplImage *gray); + IplImage* normalx; + IplImage* normaly; + IntegralImage integx; + IntegralImage integy; + // Calculate point normals for 4-pixel intersection + // as described in Donahue1992 + void CalculatePointNormals(IplImage* gray); + public: - IntegralGradient(); - ~IntegralGradient(); - /** \brief Update intermediate images for calculating the gradients to the given image. - * \param gray The original grayscale image we want analyze - */ - void Update(IplImage *gray); - /** \brief Calculate the gradient for the given rectangular area in the image. - * \param dirx Method fills in the x-component of the gradient here - * \param diry Method fills in the y-component of the gradient here - * \param count If this parameter is not 0 it is filled with number of pixels in the rectangle. - */ - void GetGradient(CvRect &rect, double *dirx, double *diry, int *count=0); - /** \brief Calculate the average gradient for the given rectangular area in the image. - * \param dirx Method fills in the x-component of the gradient here - * \param diry Method fills in the y-component of the gradient here - */ - void GetAveGradient(CvRect &rect, double *dirx, double *diry); + IntegralGradient(); + ~IntegralGradient(); + /** \brief Update intermediate images for calculating the gradients to the + * given image. \param gray The original grayscale image we want analyze + */ + void Update(IplImage* gray); + /** \brief Calculate the gradient for the given rectangular area in the image. + * \param dirx Method fills in the x-component of the gradient here + * \param diry Method fills in the y-component of the gradient here + * \param count If this parameter is not 0 it is filled with number of pixels + * in the rectangle. + */ + void GetGradient(CvRect& rect, double* dirx, double* diry, int* count = 0); + /** \brief Calculate the average gradient for the given rectangular area in + * the image. 
\param dirx Method fills in the x-component of the gradient here + * \param diry Method fills in the y-component of the gradient here + */ + void GetAveGradient(CvRect& rect, double* dirx, double* diry); }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Kalman.h b/ar_track_alvar/include/ar_track_alvar/Kalman.h index e45f4b8..c1597dc 100644 --- a/ar_track_alvar/include/ar_track_alvar/Kalman.h +++ b/ar_track_alvar/include/ar_track_alvar/Kalman.h @@ -26,291 +26,333 @@ /** * \file Kalman.h - * + * * \brief This file implements a versatile Kalman filter. */ #include "Alvar.h" -#include -#include "opencv2/core/types_c.h" -#include -#include - - -namespace alvar { +#include +namespace alvar +{ /** \brief Core implementation for Kalman sensor */ -class ALVAR_EXPORT KalmanSensorCore { - friend class KalmanVisualize; +class ALVAR_EXPORT KalmanSensorCore +{ + friend class KalmanVisualize; + protected: - int n; - int m; - CvMat *H_trans; - CvMat *z_pred; - CvMat *z_residual; - CvMat *x_gain; + int n; + int m; + cv::Mat H_trans; + cv::Mat z_pred; + cv::Mat z_residual; + cv::Mat x_gain; + public: - /** \brief Latest measurement vector (m*1) */ - CvMat *z; - /** \brief The matrix (m*n) mapping Kalman state vector into this sensor's measurements vector */ - CvMat *H; - /** \brief The matrix (n*m) containing Kalman gain (something between 0 and H^-1). - * In this core-implementation we assume this to be precalculated. In \e KalmanSensor this is updated using \e update_K . - */ - CvMat *K; - /** \brief Copy constructor */ - KalmanSensorCore(const KalmanSensorCore &k); - /** - * \brief Constructor - * \param _n The number of items in the Kalman state vector - * \param _m The number of measurements given by this sensor - */ - KalmanSensorCore(int _n, int _m); - /** \brief Destructor */ - ~KalmanSensorCore(); - /** \brief Accessor for n */ - int get_n() { return n; } - /** \brief Accessor for m */ - int get_m() { return m; } - /** \brief Method for updating the state estimate x - * This is called from \e predict_update() of \e Kalman. - * In \e KalmanSensorCore and in \e KalmanSensor this update is made linearly - * but \e KalmanSensorEkf will override this method to use unlinear estimation. - */ - virtual void update_x(CvMat *x_pred, CvMat *x); + /** \brief Latest measurement vector (m*1) */ + cv::Mat z; + /** \brief The matrix (m*n) mapping Kalman state vector into this sensor's + * measurements vector */ + cv::Mat H; + /** \brief The matrix (n*m) containing Kalman gain (something between 0 and + * H^-1). In this core-implementation we assume this to be precalculated. In + * \e KalmanSensor this is updated using \e update_K . + */ + cv::Mat K; + /** \brief Copy constructor */ + KalmanSensorCore(const KalmanSensorCore& k); + /** + * \brief Constructor + * \param _n The number of items in the Kalman state vector + * \param _m The number of measurements given by this sensor + */ + KalmanSensorCore(int _n, int _m); + /** \brief Destructor */ + ~KalmanSensorCore(); + /** \brief Accessor for n */ + int get_n() + { + return n; + } + /** \brief Accessor for m */ + int get_m() + { + return m; + } + /** \brief Method for updating the state estimate x + * This is called from \e predict_update() of \e Kalman. + * In \e KalmanSensorCore and in \e KalmanSensor this update is made linearly + * but \e KalmanSensorEkf will override this method to use unlinear + * estimation. 
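+ * In cv::Mat terms the linear update performed here is equivalent to the
+ * following sketch (using the members declared in this class):
+ * \code
+ * x = x_pred + K * (z - H * x_pred);
+ * \endcode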
+ */ + virtual void update_x(const cv::Mat& x_pred, cv::Mat& x); }; /** \brief Core implementation for Kalman */ -class ALVAR_EXPORT KalmanCore { - friend class KalmanVisualize; +class ALVAR_EXPORT KalmanCore +{ + friend class KalmanVisualize; + protected: - int n; - //CvMat *x_pred; - CvMat *F_trans; - virtual void predict_x(unsigned long tick); -public: - /** \brief The Kalman state vector (n*1) */ - CvMat *x; - /** \brief The matrix (n*n) containing the transition model for the internal state. */ - CvMat *F; - /** \brief Copy constructor */ - KalmanCore(const KalmanCore &s); - /** - * \brief Constructor - * \param _n The number of items in the Kalman state vector - */ - KalmanCore(int _n); - /** \brief Destructor */ - ~KalmanCore(); - /** \brief Accessor for n */ - int get_n() { return n; } - /** \brief Predict the Kalman state vector for the given time step . - * x_pred = F * x - */ - virtual CvMat *predict(); - /** \brief Predict the Kalman state vector and update the state using the constant Kalman gain. - * x = x_pred + K* ( z - H*x_pred) - */ - CvMat *predict_update(KalmanSensorCore *sensor); + int n; + // cv::Mat x_pred; + cv::Mat F_trans; + virtual void predict_x(unsigned long tick); - /** \brief Predicted state, TODO: should be protected?! */ - CvMat *x_pred; +public: + /** \brief The Kalman state vector (n*1) */ + cv::Mat x; + /** \brief The matrix (n*n) containing the transition model for the internal + * state. */ + cv::Mat F; + /** \brief Copy constructor */ + KalmanCore(const KalmanCore& s); + /** + * \brief Constructor + * \param _n The number of items in the Kalman state vector + */ + KalmanCore(int _n); + /** \brief Destructor */ + ~KalmanCore(); + /** \brief Accessor for n */ + int get_n() + { + return n; + } + /** \brief Predict the Kalman state vector for the given time step . + * x_pred = F * x + */ + virtual cv::Mat& predict(); + /** \brief Predict the Kalman state vector and update the state using the + * constant Kalman gain. x = x_pred + K* ( z - H*x_pred) + */ + cv::Mat& predict_update(KalmanSensorCore* sensor); + /** \brief Predicted state, TODO: should be protected?! */ + cv::Mat x_pred; }; /** \brief Kalman sensor implementation */ -class ALVAR_EXPORT KalmanSensor : public KalmanSensorCore { +class ALVAR_EXPORT KalmanSensor : public KalmanSensorCore +{ protected: - CvMat *R_tmp; - CvMat *P_tmp; + cv::Mat R_tmp; + cv::Mat P_tmp; + public: - /** \brief The covariance matrix for the observation noise */ - CvMat *R; - /** \brief Copy constructor */ - KalmanSensor(const KalmanSensor &k); - /** - * \brief Constructor - * \param _n The number of items in the Kalman state vector - * \param _m The number of measurements given by this sensor - */ - KalmanSensor(int n, int _m); - /** \brief Destructor */ - ~KalmanSensor(); - /** \brief Method for updating how the Kalman state vector is mapped into this sensor's measurements vector. - * This is called from \e predict_update() of \e Kalman. - * Please override this method if you want this mapping to change on the run (e.g. based on time?). - */ - virtual void update_H(CvMat *x_pred) {} - /** \brief Method for updating the Kalman gain. - * This is called from \e predict_update() of \e Kalman. - */ - virtual void update_K(CvMat *P_pred); - /** \brief Method for updating the error covariance matrix describing the accuracy of the state estimate. - * This is called from \e predict_update() of \e Kalman. 
- */ - virtual void update_P(CvMat *P_pred, CvMat *P); + /** \brief The covariance matrix for the observation noise */ + cv::Mat R; + /** \brief Copy constructor */ + KalmanSensor(const KalmanSensor& k); + /** + * \brief Constructor + * \param _n The number of items in the Kalman state vector + * \param _m The number of measurements given by this sensor + */ + KalmanSensor(int n, int _m); + /** \brief Destructor */ + ~KalmanSensor(); + /** \brief Method for updating how the Kalman state vector is mapped into + * this sensor's measurements vector. This is called from \e predict_update() + * of \e Kalman. Please override this method if you want this mapping to + * change on the run (e.g. based on time?). + */ + virtual void update_H(const cv::Mat& x_pred) + { + } + /** \brief Method for updating the Kalman gain. + * This is called from \e predict_update() of \e Kalman. + */ + virtual void update_K(const cv::Mat& P_pred); + /** \brief Method for updating the error covariance matrix describing the + * accuracy of the state estimate. This is called from \e predict_update() of + * \e Kalman. + */ + virtual void update_P(const cv::Mat& P_pred, cv::Mat& P); }; /** \brief Kalman implementation -* -* The Kalman filter provides an effective way of estimating a system/process recursively -* (http://www.cs.unc.edu/~welch/media/pdf/kalman_intro.pdf). In this implementation -* we have separated the Kalman class (\e KalmanCore, \e Kalman or \e KalmanEkf) -* from the sensor class (\e KalmanSensorCore, \e KalmanSensor or \e KalmanSensorEkf). -* The selected Kalman class contains always the latest estimation of the process. -* The estimation can be updated using one or several sensors. This implementation allows -* SCAAT approach, where there may be several sensors (and several controls) for each -* Kalman filter (See http://www.cs.unc.edu/~welch/scaat.html). -* -* Currently we have have three levels of implementations for both Kalman and Sensor (\e core, \e "normal" and \e EKF). -* -* The \e core implementations can be used for really fast bare-bones core implementations when -* we have precalculated and constant \e K. In systems where \e F, \e H, \e Q and \e R are constants -* the \e K will converge into a constant and it can be precalculated. Note, that the core -* implementation need to assume constant frame rate if \e F depends on the timestep between frames. -* Note also, that the core-classes cannot use \e EKF Jacobians because they change the \e H. -* -* The \e "normal" implementations are used when we have a linear \e F for \e Kalman, or linear \e H -* for \e KalmanSensor. The \e EKF implementations are used when we have non-linear function \e f() -* for \e KalmanEkf, or non-linear function \e h() for \e KalmanSensorEkf. -* -* Furthermore we have a class \e KalmanVisualize for visualizing the internal state of \e Kalman. -* -* Note, that now the \e KalmanControl is left out from this implementation. But it could be added -* using similar conventions as the \e KalmanSensor. -*/ -class ALVAR_EXPORT Kalman : public KalmanCore { + * + * The Kalman filter provides an effective way of estimating a system/process + * recursively (http://www.cs.unc.edu/~welch/media/pdf/kalman_intro.pdf). In + * this implementation we have separated the Kalman class (\e KalmanCore, \e + * Kalman or \e KalmanEkf) from the sensor class (\e KalmanSensorCore, \e + * KalmanSensor or \e KalmanSensorEkf). The selected Kalman class contains + * always the latest estimation of the process. 
The estimation can be updated + * using one or several sensors. This implementation allows SCAAT approach, + * where there may be several sensors (and several controls) for each Kalman + * filter (See http://www.cs.unc.edu/~welch/scaat.html). + * + * Currently we have three levels of implementations for both Kalman and + * Sensor (\e core, \e "normal" and \e EKF). + * + * The \e core implementations can be used for really fast bare-bones core + * implementations when we have precalculated and constant \e K. In systems + * where \e F, \e H, \e Q and \e R are constants the \e K will converge into a + * constant and it can be precalculated. Note, that the core implementation needs + * to assume constant frame rate if \e F depends on the timestep between frames. + * Note also, that the core-classes cannot use \e EKF Jacobians because they + * change the \e H. + * + * The \e "normal" implementations are used when we have a linear \e F for \e + * Kalman, or linear \e H for \e KalmanSensor. The \e EKF implementations are + * used when we have non-linear function \e f() for \e KalmanEkf, or non-linear + * function \e h() for \e KalmanSensorEkf. + * + * Furthermore we have a class \e KalmanVisualize for visualizing the internal + * state of \e Kalman. + * + * Note, that now the \e KalmanControl is left out from this implementation. But + * it could be added using similar conventions as the \e KalmanSensor. + */ +class ALVAR_EXPORT Kalman : public KalmanCore +{ protected: - int prev_tick; - void predict_P(); + int prev_tick; + void predict_P(); + public: - /** \brief The error covariance matrix describing the accuracy of the state estimate */ - CvMat *P; - /** \brief The covariance matrix for the process noise */ - CvMat *Q; - /** \brief The predicted error covariance matrix */ - CvMat *P_pred; - /** - * \brief Constructor - * \param n The number of items in the Kalman state vector - * \param _m The number of measurements given by this sensor - */ - Kalman(int _n); - /** \brief Destructor */ - ~Kalman(); - /** - * If your transition matrix F is based on time you need to override this method. - */ - virtual void update_F(unsigned long tick); - /** \brief Predict the Kalman state vector for the given time step - * This calls \e updateF for updating the transition matrix based on the real time step - * - * x_pred = F*x - * P_pred = F*P*trans(F) + Q - */ - CvMat *predict(unsigned long tick); - /** \brief Predict the Kalman state vector for the given time step and update the state using the Kalman gain. - * - Calls first the predict to ensure that the prediction is based on same timestep as the update - * - K = P_pred * trans(H) * inv(H*P_pred*trans(H) + R) - * - x = x_pred + K* ( z - H*x_pred) - * - P = (I - K*H) * P_pred - */ - CvMat *predict_update(KalmanSensor *sensor, unsigned long tick); - /** \brief Helper method. */ - double seconds_since_update(unsigned long tick); + /** \brief The error covariance matrix describing the accuracy of the state + * estimate */ + cv::Mat P; + /** \brief The covariance matrix for the process noise */ + cv::Mat Q; + /** \brief The predicted error covariance matrix */ + cv::Mat P_pred; + /** + * \brief Constructor + * \param _n The number of items in the Kalman state vector + */ + Kalman(int _n); + /** \brief Destructor */ + ~Kalman(); + /** + * If your transition matrix F is based on time you need to override this + * method. 
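+ * For example, a constant-velocity model could rebuild \e F from the elapsed
+ * time (a sketch; \e MyKalman is a hypothetical subclass and the state is
+ * assumed to be a CV_64F vector laid out as [position, velocity]):
+ * \code
+ * void MyKalman::update_F(unsigned long tick) {
+ *   double dt = seconds_since_update(tick);
+ *   cv::setIdentity(F);
+ *   F.at<double>(0, 1) = dt; // position += velocity * dt
+ * }
+ * \endcode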
+ */ + virtual void update_F(unsigned long tick); + /** \brief Predict the Kalman state vector for the given time step + * This calls \e update_F for updating the transition matrix based on the real + * time step + * + * x_pred = F*x + * P_pred = F*P*trans(F) + Q + */ + cv::Mat& predict(unsigned long tick); + /** \brief Predict the Kalman state vector for the given time step and update + * the state using the Kalman gain. + * - Calls first the predict to ensure that the prediction is based on same + * timestep as the update + * - K = P_pred * trans(H) * inv(H*P_pred*trans(H) + R) + * - x = x_pred + K* ( z - H*x_pred) + * - P = (I - K*H) * P_pred + */ + cv::Mat& predict_update(KalmanSensor* sensor, unsigned long tick); + /** \brief Helper method. */ + double seconds_since_update(unsigned long tick); }; /** \brief Extended Kalman Filter (EKF) sensor implementation. -* -* Please override the pure virtual \e h() with the desired unlinear function. -* By default the \e upate_H calculates the Jacobian numerically, if you want other approach override -* also the \e update_H() -*/ -class ALVAR_EXPORT KalmanSensorEkf : public KalmanSensor { + * + * Please override the pure virtual \e h() with the desired non-linear function. + * By default the \e update_H calculates the Jacobian numerically; if you want + * another approach, override also the \e update_H() + */ +class ALVAR_EXPORT KalmanSensorEkf : public KalmanSensor +{ protected: - CvMat *delta; - CvMat *x_plus; - CvMat *x_minus; - CvMat *z_tmp1; - CvMat *z_tmp2; - virtual void h(CvMat *x_pred, CvMat *_z_pred) = 0; - virtual void update_H(CvMat *x_pred); - virtual void update_x(CvMat *x_pred, CvMat *x); + cv::Mat delta; + cv::Mat x_plus; + cv::Mat x_minus; + cv::Mat z_tmp1; + cv::Mat z_tmp2; + virtual void h(const cv::Mat& x_pred, const cv::Mat& _z_pred) = 0; + virtual void update_H(const cv::Mat& x_pred); + virtual void update_x(const cv::Mat& x_pred, cv::Mat& x); + public: - KalmanSensorEkf(const KalmanSensorEkf &k); - KalmanSensorEkf(int _n, int _m); - ~KalmanSensorEkf(); + KalmanSensorEkf(const KalmanSensorEkf& k); + KalmanSensorEkf(int _n, int _m); + ~KalmanSensorEkf(); }; /** \brief Extended Kalman Filter (EKF) implementation. -* -* Please override the pure virtual \e f() with the desired unlinear function. -* By default the \e upate_F calculates the Jacobian numerically, if you want other approach override -* also the \e update_F() -*/ -class ALVAR_EXPORT KalmanEkf : public Kalman { + * + * Please override the pure virtual \e f() with the desired non-linear function. + * By default the \e update_F calculates the Jacobian numerically; if you want + * another approach, override also the \e update_F() + */ +class ALVAR_EXPORT KalmanEkf : public Kalman +{ protected: - CvMat *delta; - CvMat *x_plus; - CvMat *x_minus; - CvMat *x_tmp1; - CvMat *x_tmp2; - virtual void f(CvMat *_x, CvMat *_x_pred, double dt) = 0; - virtual void update_F(unsigned long tick); - virtual void predict_x(unsigned long tick); + cv::Mat delta; + cv::Mat x_plus; + cv::Mat x_minus; + cv::Mat x_tmp1; + cv::Mat x_tmp2; + virtual void f(const cv::Mat& _x, const cv::Mat& _x_pred, double dt) = 0; + virtual void update_F(unsigned long tick); + virtual void predict_x(unsigned long tick); + public: - KalmanEkf(int _n); - ~KalmanEkf(); + KalmanEkf(int _n); + ~KalmanEkf(); }; /** \brief Class for visualizing Kalman filter Usage: \code - KalmanVisualize kvis(&kalman, &sensor); - ... 
- kvis.update_pre(); - kalman.predict_update(&sensor); - kvis.update_post(); - kvis.show(); + KalmanVisualize kvis(&kalman, &sensor); + ... + kvis.update_pre(); + kalman.predict_update(&sensor); + kvis.update_post(); + kvis.show(); \endcode */ -class ALVAR_EXPORT KalmanVisualize { - int n; - int m; - KalmanCore *kalman; - KalmanSensorCore *sensor; - Kalman *kalman_ext; - KalmanSensor *sensor_ext; - /** \brief Image collecting visualization of the Kalman filter */ - cv::Mat *img; - /** \brief Image to show */ - cv::Mat *img_legend; - /** \brief Image to show */ - cv::Mat *img_show; - /** \brief visualization scale before show */ - int img_scale; - /** \brief Add matrix to the image */ - void img_matrix(CvMat *mat, int top, int left); - /** \brief Init everything. Called from constructors. */ - void Init(); +class ALVAR_EXPORT KalmanVisualize +{ + int n; + int m; + KalmanCore* kalman; + KalmanSensorCore* sensor; + Kalman* kalman_ext; + KalmanSensor* sensor_ext; + /** \brief Image collecting visualization of the Kalman filter */ + cv::Mat img; + /** \brief Image to show */ + cv::Mat img_legend; + /** \brief Image to show */ + cv::Mat img_show; + /** \brief visualization scale before show */ + int img_scale; + /** \brief Add matrix to the image */ + void img_matrix(const cv::Mat& mat, int top, int left); + /** \brief Init everything. Called from constructors. */ + void Init(); + public: - /** \brief Helper method for outputting matrices (for debug purposes) */ - static void out_matrix(CvMat *m, char *name); - /** \brief Constructor for full Kalman implementation */ - KalmanVisualize(Kalman *_kalman, KalmanSensor *_sensor); - /** \brief Constructor for core Kalman implementation (not all visualizations possible) */ - KalmanVisualize(KalmanCore *_kalman, KalmanSensorCore *_sensor); - /** \brief Destructor */ - ~KalmanVisualize(); - /** \brief Update the visualization image - call this before the Kalman's predict_update */ - void update_pre(); - /** \brief Update the visualization image - call this after the Kalman's predict_update */ - void update_post(); - /** \brief Show the genrated visualization image */ - void show(); + /** \brief Helper method for outputting matrices (for debug purposes) */ + static void out_matrix(const cv::Mat& m, char* name); + /** \brief Constructor for full Kalman implementation */ + KalmanVisualize(Kalman* _kalman, KalmanSensor* _sensor); + /** \brief Constructor for core Kalman implementation (not all visualizations + * possible) */ + KalmanVisualize(KalmanCore* _kalman, KalmanSensorCore* _sensor); + /** \brief Destructor */ + ~KalmanVisualize(); + /** \brief Update the visualization image - call this before the Kalman's + * predict_update */ + void update_pre(); + /** \brief Update the visualization image - call this after the Kalman's + * predict_update */ + void update_post(); + /** \brief Show the generated visualization image */ + void show(); }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Line.h b/ar_track_alvar/include/ar_track_alvar/Line.h index a9b1f07..4acf004 100644 --- a/ar_track_alvar/include/ar_track_alvar/Line.h +++ b/ar_track_alvar/include/ar_track_alvar/Line.h @@ -33,42 +33,46 @@ #include "Alvar.h" #include "Util.h" -namespace alvar { - +namespace alvar +{ /** - * \brief Struct representing a line. The line is parametrized by its center and direction vector. + * \brief Struct representing a line. The line is parametrized by its center and + * direction vector. 
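+ *
+ * For example, a line through the origin pointing along the positive x-axis
+ * (a sketch; the parameter order follows the constructor documented below):
+ * \code
+ * alvar::Line l(cv::Vec4f(1.f, 0.f, 0.f, 0.f)); // dir x, dir y, center x, center y
+ * // l.s now holds the direction vector and l.c the center point
+ * \endcode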
*/ struct ALVAR_EXPORT Line { - /** - * \brief Constructor. - * \param params params[0] and params[1] are the x and y components of the direction vector, params[2] and params[3] are the x and y coordinates of the line center. - */ - Line(cv::Vec4f params); - Line() - {} - - /** - * \brief Line center. - */ - PointDouble c; // center - /** - * \brief Direction vector. - */ - PointDouble s; // direction vector + /** + * \brief Constructor. + * \param params params[0] and params[1] are the x and y components of the + * direction vector, params[2] and params[3] are the x and y coordinates of + * the line center. + */ + Line(const cv::Vec4f& params); + Line() + { + } + + /** + * \brief Line center. + */ + PointDouble c; // center + /** + * \brief Direction vector. + */ + PointDouble s; // direction vector }; /** - * \brief Fit lines to vector of points. + * \brief Fit lines to vector of points. * \param lines Resulting set of lines. * \param corners Index list of line breaks. * \param edge Vector of points (pixels) where the line is fitted. - * \param grey In the future, we may want to fit lines directly to grayscale image instead of thresholded edge. + * \param grey In the future, we may want to fit lines directly to grayscale + * image instead of thresholded edge. */ -int ALVAR_EXPORT FitLines(std::vector &lines, - const std::vector& corners, - const std::vector& edge, - cv::Mat *grey=0); +int ALVAR_EXPORT FitLines(std::vector& lines, + const std::vector& corners, + const std::vector& edge); /** * \brief Calculates an intersection point of two lines. @@ -78,15 +82,6 @@ int ALVAR_EXPORT FitLines(std::vector &lines, */ PointDouble ALVAR_EXPORT Intersection(const Line& l1, const Line& l2); -} // namespace alvar +} // namespace alvar #endif - - - - - - - - - diff --git a/ar_track_alvar/include/ar_track_alvar/Lock.h b/ar_track_alvar/include/ar_track_alvar/Lock.h index 546b780..ac5d184 100644 --- a/ar_track_alvar/include/ar_track_alvar/Lock.h +++ b/ar_track_alvar/include/ar_track_alvar/Lock.h @@ -34,8 +34,8 @@ #include "Mutex.h" #include "Uncopyable.h" -namespace alvar { - +namespace alvar +{ /** * \brief Lock for simplifying mutex handling. * @@ -46,29 +46,28 @@ namespace alvar { class ALVAR_EXPORT Lock : private Uncopyable { public: - /** - * \brief Constructor. - * - * \param mutex The mutex to lock. - */ - Lock(Mutex *mutex) - : mMutex(mutex) - { - mMutex->lock(); - } + /** + * \brief Constructor. + * + * \param mutex The mutex to lock. + */ + Lock(Mutex* mutex) : mMutex(mutex) + { + mMutex->lock(); + } - /** - * \brief Destructor. - */ - ~Lock() - { - mMutex->unlock(); - } + /** + * \brief Destructor. + */ + ~Lock() + { + mMutex->unlock(); + } private: - Mutex *mMutex; + Mutex* mMutex; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Marker.h b/ar_track_alvar/include/ar_track_alvar/Marker.h index 8240b36..f0992c5 100644 --- a/ar_track_alvar/include/ar_track_alvar/Marker.h +++ b/ar_track_alvar/include/ar_track_alvar/Marker.h @@ -41,305 +41,399 @@ #include #include "filter/kinect_filtering.h" #include +#include -namespace alvar { +namespace alvar +{ +/** + * \brief Basic 2D \e Marker functionality. + * + * This class contains the basic \e Marker functionality for planar markers. 
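+ *
+ * A typical per-frame flow drives a marker roughly like this (a sketch; \e
+ * corners stands for the detected image-space corner points, whose exact
+ * container type is elided here, and \e gray / \e cam are the grayscale
+ * frame and calibrated \e Camera):
+ * \code
+ * int orientation;
+ * if (marker.UpdateContent(corners, gray, &cam) &&
+ *     marker.DecodeContent(&orientation)) {
+ *   marker.UpdatePose(corners, &cam, orientation);
+ *   // marker.pose now holds the estimated marker pose
+ * }
+ * \endcode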
+ */ +class ALVAR_EXPORT Marker +{ +protected: + void VisualizeMarkerPose(cv::Mat& image, Camera* cam, + double visualize2d_points[12][2], + const cv::Scalar& color = CV_RGB(255, 0, 0)) const; + virtual void VisualizeMarkerContent(cv::Mat& image, Camera* cam, + double datatext_point[2], + double content_point[2]) const; + virtual void VisualizeMarkerError(cv::Mat& image, Camera* cam, + double errortext_point[2]) const; + bool UpdateContentBasic(std::vector& _marker_corners_img, + cv::Mat& gray, Camera* cam, int frame_no = 0); - /** - * \brief Basic 2D \e Marker functionality. +public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + bool valid; + + /** \brief Compares the marker corners with the previous match. * - * This class contains the basic \e Marker functionality for planar markers. + * In some cases the tracking of the marker can be accepted solely based on + * this. Returns the marker orientation and an error value describing the + * pixel error relative to the marker diameter. + */ + void CompareCorners(std::vector& _marker_corners_img, + int* orientation, double* error); + /** \brief Compares the marker corners with the previous match. + */ + void CompareContent(std::vector& _marker_corners_img, + cv::Mat& gray, Camera* cam, int* orientation) const; + /** \brief Updates the \e marker_content from the image using \e Homography + */ + virtual bool UpdateContent(std::vector& _marker_corners_img, + cv::Mat& gray, Camera* cam, int frame_no = 0); + /** \brief Updates the markers \e pose estimation + */ + void UpdatePose(std::vector& _marker_corners_img, Camera* cam, + int orientation, int frame_no = 0, bool update_pose = true); + /** \brief Decodes the marker content. Please call \e UpdateContent before + * this. This virtual method is meant to be implemented by heirs. + */ + virtual bool DecodeContent(int* orientation); + + /** \brief Returns the content as a matrix */ - class ALVAR_EXPORT Marker + cv::Mat GetContent() const { - protected: - void VisualizeMarkerPose(cv::Mat *image, Camera *cam, double visualize2d_points[12][2], cv::Scalar color=cv::Scalar(255,0,0)) const; - virtual void VisualizeMarkerContent(cv::Mat *image, Camera *cam, double datatext_point[2], double content_point[2]) const; - virtual void VisualizeMarkerError(cv::Mat *image, Camera *cam, double errortext_point[2]) const; - bool UpdateContentBasic(std::vector > &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no = 0); - - public: - EIGEN_MAKE_ALIGNED_OPERATOR_NEW - bool valid; - - /** \brief Compares the marker corners with the previous match. - * - * In some cases the tracking of the marker can be accepted solely based on this. - * Returns the marker orientation and an error value describing the pixel error - * relative to the marker diameter. - */ - void CompareCorners(std::vector &_marker_corners_img, int *orientation, double *error); - /** \brief Compares the marker corners with the previous match. - */ - void CompareContent(std::vector &_marker_corners_img, cv::Mat *gray, Camera *cam, int *orientation) const; - /** \brief Updates the \e marker_content from the image using \e Homography - */ - virtual bool UpdateContent(std::vector &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no = 0); - /** \brief Updates the markers \e pose estimation - */ - void UpdatePose(std::vector &_marker_corners_img, Camera *cam, int orientation, int frame_no = 0, bool update_pose = true); - /** \brief Decodes the marker content. Please call \e UpdateContent before this. 
- * This virtual method is meant to be implemented by heirs. - */ - virtual bool DecodeContent(int *orientation); - - /** \brief Returns the content as a matrix - */ - CvMat *GetContent() const { - return marker_content; + return marker_content; + } + /** \brief Saves the marker as an image + */ + void SaveMarkerImage(const char* filename, int save_res = 0) const; + /** \brief Draw the marker filling the ROI in the given image + */ + void ScaleMarkerToImage(cv::Mat& image) const; + /** \brief Visualize the marker + */ + void Visualize(cv::Mat& image, Camera* cam, + const cv::Scalar& color = CV_RGB(255, 0, 0)) const; + /** \brief Method for resizing the marker dimensions */ + void SetMarkerSize(double _edge_length = 0, int _res = 0, double _margin = 0); + /** \brief Get edge length (to support different size markers */ + double GetMarkerEdgeLength() const + { + return edge_length; + } + /** \brief Destructor */ + ~Marker(); + /** \brief Default constructor + * \param _edge_length Length of the marker's edge in whatever units you are + * using (e.g. cm) \param _res The marker content resolution in pixels (this + * is actually \param _margin The marker margin resolution in pixels (The + * actual captured marker image has pixel resolution of _margin+_res+_margin) + */ + Marker(double _edge_length = 0, int _res = 0, double _margin = 0); + /** \brief Copy constructor */ + Marker(const Marker& m); + /** \brief Get id for this marker + * This is used e.g. in MarkerDetector to associate a marker id with + * an appropriate edge length. This method should be overwritten + * to return a suitable identification number for each marker type. + */ + virtual unsigned long GetId() const + { + return 0; + } + virtual void SetId(unsigned long _id){}; + + /** + * Returns the resolution (the number of square rows and columns) of the + * marker content area. The total number of content squares within the + * content area is resolution*resolution. + */ + int GetRes() const + { + return res; + } + + /** + * Returns the margin thickness, that is, the number of rows or columns of + * black squares surrounding the content area. + */ + double GetMargin() const + { + return margin; + } + + /** \brief The current marker \e Pose + */ + Pose pose; + /** \brief Get marker detection error estimate + * \param errors Flags indicating what error elements are combined + * The marker detection error can consist of several elements: + * MARGIN_ERROR is updated in \e UpdateContent and it indicates erroneous + * values inside the marginal area. DECODE_ERROR is updated in \e + * DecodeContent and it indicates erroneous values inside the actual marker + * content area. 
TRACK_ERROR is updated in \e MarkerDetector.Detect and it + * indicates the amount of tracking error returned from \e CompareCorners + */ + double GetError(int errors = (MARGIN_ERROR | DECODE_ERROR)) const + { + int count = 0; + double error = 0; + if (errors & MARGIN_ERROR) + { + error += margin_error; + count++; } - /** \brief Saves the marker as an image - */ - void SaveMarkerImage(const char *filename, int save_res = 0) const; - /** \brief Draw the marker filling the ROI in the given image - */ - void ScaleMarkerToImage(cv::Mat *image) const; - /** \brief Visualize the marker - */ - void Visualize(cv::Mat *image, Camera *cam, cv::Scalar color=cv::Scalar(255,0,0)) const; - /** \brief Method for resizing the marker dimensions */ - void SetMarkerSize(double _edge_length = 0, int _res = 0, double _margin = 0); - /** \brief Get edge length (to support different size markers */ - double GetMarkerEdgeLength() const { return edge_length; } - /** \brief Destructor */ - ~Marker(); - /** \brief Default constructor - * \param _edge_length Length of the marker's edge in whatever units you are using (e.g. cm) - * \param _res The marker content resolution in pixels (this is actually - * \param _margin The marker margin resolution in pixels (The actual captured marker image has pixel resolution of _margin+_res+_margin) - */ - Marker(double _edge_length = 0, int _res = 0, double _margin = 0); - /** \brief Copy constructor */ - Marker(const Marker& m); - /** \brief Get id for this marker - * This is used e.g. in MarkerDetector to associate a marker id with - * an appropriate edge length. This method should be overwritten - * to return a suitable identification number for each marker type. - */ - virtual unsigned long GetId() const { return 0; } - virtual void SetId(unsigned long _id) {}; - - /** - * Returns the resolution (the number of square rows and columns) of the - * marker content area. The total number of content squares within the - * content area is resolution*resolution. - */ - int GetRes() const { return res; } - - /** - * Returns the margin thickness, that is, the number of rows or columns of - * black squares surrounding the content area. - */ - double GetMargin() const { return margin; } - - /** \brief The current marker \e Pose - */ - Pose pose; - /** \brief Get marker detection error estimate - * \param errors Flags indicating what error elements are combined - * The marker detection error can consist of several elements: - * MARGIN_ERROR is updated in \e UpdateContent and it indicates erroneous values inside the marginal area. - * DECODE_ERROR is updated in \e DecodeContent and it indicates erroneous values inside the actual marker content area. 
- * TRACK_ERROR is updated in \e MarkerDetector.Detect and it indicates the amount of tracking error returned from \e CompareCorners - */ - double GetError(int errors = (MARGIN_ERROR | DECODE_ERROR)) const { - int count = 0; - double error = 0; - if (errors & MARGIN_ERROR) {error+=margin_error; count++;} - if (errors & DECODE_ERROR) {error+=decode_error; count++;} - if (errors & TRACK_ERROR) {error+=track_error; count++;} - return error/count; + if (errors & DECODE_ERROR) + { + error += decode_error; + count++; } - /** \brief Set the marker error estimate */ - void SetError(int error_type, double value) { - if (error_type == MARGIN_ERROR) margin_error = value; - else if (error_type == DECODE_ERROR) decode_error = value; - else if (error_type == TRACK_ERROR) track_error = value; + if (errors & TRACK_ERROR) + { + error += track_error; + count++; } - static const int MARGIN_ERROR=1; - static const int DECODE_ERROR=2; - static const int TRACK_ERROR=4; - protected: - double margin_error; - double decode_error; - double track_error; - double edge_length; - int res; - double margin; - CvMat *marker_content; - - public: - - /** \brief Marker color points in marker coordinates */ - std::vector marker_points; - /** \brief Marker corners in marker coordinates */ - std::vector marker_corners; - /** \brief Marker corners in image coordinates */ - std::vector marker_corners_img; - /** \brief Marker points in image coordinates */ - std::vector ros_marker_points_img; - ar_track_alvar::ARCloud ros_corners_3D; - int ros_orientation; - /** \brief Samples to be used in figuring out min/max for thresholding */ - std::vector marker_margin_w; - /** \brief Samples to be used in figuring out min/max for thresholding */ - std::vector marker_margin_b; + return error / count; + } + /** \brief Set the marker error estimate */ + void SetError(int error_type, double value) + { + if (error_type == MARGIN_ERROR) + margin_error = value; + else if (error_type == DECODE_ERROR) + decode_error = value; + else if (error_type == TRACK_ERROR) + track_error = value; + } + static const int MARGIN_ERROR = 1; + static const int DECODE_ERROR = 2; + static const int TRACK_ERROR = 4; + +protected: + double margin_error; + double decode_error; + double track_error; + double edge_length; + int res; + double margin; + cv::Mat marker_content; + +public: + /** \brief Marker color points in marker coordinates */ + std::vector marker_points; + /** \brief Marker corners in marker coordinates */ + std::vector marker_corners; + /** \brief Marker corners in image coordinates */ + std::vector marker_corners_img; + /** \brief Marker points in image coordinates */ + std::vector ros_marker_points_img; + ar_track_alvar::ARCloud ros_corners_3D; + int ros_orientation; + /** \brief Samples to be used in figuring out min/max for thresholding */ + std::vector marker_margin_w; + /** \brief Samples to be used in figuring out min/max for thresholding */ + std::vector marker_margin_b; #ifdef VISUALIZE_MARKER_POINTS - std::vector marker_allpoints_img; + std::vector marker_allpoints_img; #endif - }; +}; - /** - * \brief \e MarkerArtoolkit for using matrix markers similar with the ones used in ARToolkit. - */ - class ALVAR_EXPORT MarkerArtoolkit : public Marker +/** + * \brief \e MarkerArtoolkit for using matrix markers similar with the ones used + * in ARToolkit. 
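+ *
+ * Example (a sketch; this assumes \e MarkerArtoolkit mirrors the
+ * (edge_length, res, margin) constructor of \e Marker):
+ * \code
+ * alvar::MarkerArtoolkit m(9.0); // 9-unit edges, default res and margin
+ * \endcode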
+ */ +class ALVAR_EXPORT MarkerArtoolkit : public Marker +{ +protected: + int default_res() + { + std::cout << "MarkerArtoolkit::default_res" << std::endl; + return 3; + } + double default_margin() { - protected: - int default_res() { std::cout<<"MarkerArtoolkit::default_res"<& _marker_corners_img, + cv::Mat& gray, Camera* cam); + +public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + static const int MAX_MARKER_STRING_LEN = 2048; + enum MarkerContentType { - protected: - virtual void VisualizeMarkerContent(cv::Mat *image, Camera *cam, double datatext_point[2], double content_point[2]) const; - void DecodeOrientation(int *error, int *total, int *orientation); - int DecodeCode(int orientation, BitsetExt *bs, int *erroneous, int *total, unsigned char* content_type); - void Read6bitStr(BitsetExt *bs, char *s, size_t s_max_len); - void Add6bitStr(BitsetExt *bs, char *s); - int UsableDataBits(int marker_res, int hamming); - bool DetectResolution(std::vector > &_marker_corners_img, cv::Mat *gray, Camera *cam); - - public: - EIGEN_MAKE_ALIGNED_OPERATOR_NEW - static const int MAX_MARKER_STRING_LEN=2048; - enum MarkerContentType { - MARKER_CONTENT_TYPE_NUMBER, - MARKER_CONTENT_TYPE_STRING, - MARKER_CONTENT_TYPE_FILE, - MARKER_CONTENT_TYPE_HTTP - }; - unsigned char content_type; - - /** \brief \e MarkerData content can be presented either as number (\e MARKER_CONTENT_TYPE_NUMBER) or string */ - union { - unsigned long id; - char str[MAX_MARKER_STRING_LEN]; - } data; - - /** \brief Default constructor - * \param _edge_length Length of the marker's edge in whatever units you are using (e.g. cm) - * \param _res The marker content resolution in pixels (this is actually - * \param _margin The marker margin resolution in pixels (The actual captured marker image has pixel resolution of _margin+_res+_margin) - */ - MarkerData(double _edge_length = 0, int _res = 0, double _margin = 0) : - Marker(_edge_length, _res, (_margin?_margin:2)) - { - } - /** \brief Get ID for recognizing this marker */ - unsigned long GetId() const { return data.id; } - /** \brief Set the ID */ - void SetId(unsigned long _id) { data.id = _id; } - /** \brief Updates the \e marker_content from the image using \e Homography - * Compared to the basic implementation in \e Marker this will also detect the marker - * resolution automatically when the marker resolution is specified to be 0. - */ - virtual bool UpdateContent(std::vector > &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no = 0); - /** \brief \e DecodeContent should be called after \e UpdateContent to fill \e content_type, \e decode_error and \e data - */ - bool DecodeContent(int *orientation); - /** \brief Updates the \e marker_content by "encoding" the given parameters - */ - void SetContent(MarkerContentType content_type, unsigned long id, const char *str, bool force_strong_hamming=false, bool verbose=false); + MARKER_CONTENT_TYPE_NUMBER, + MARKER_CONTENT_TYPE_STRING, + MARKER_CONTENT_TYPE_FILE, + MARKER_CONTENT_TYPE_HTTP }; + unsigned char content_type; - /** \brief Iterator type for traversing templated Marker vector without the template. 
- */ - class ALVAR_EXPORT MarkerIterator : public std::iterator { - public: - virtual bool operator==(const MarkerIterator& other) = 0; - virtual bool operator!=(const MarkerIterator& other) = 0; - virtual MarkerIterator& operator++() = 0; - virtual const Marker* operator*() = 0; - virtual const Marker* operator->() = 0; - virtual MarkerIterator& reset() = 0; - - void *_data; - }; + /** \brief \e MarkerData content can be presented either as number (\e + * MARKER_CONTENT_TYPE_NUMBER) or string */ + union + { + unsigned long id; + char str[MAX_MARKER_STRING_LEN]; + } data; - /** \brief Iterator implementation for traversing templated Marker vector - * without the template. - * \param T T extends Marker + /** \brief Default constructor + * \param _edge_length Length of the marker's edge in whatever units you are + * using (e.g. cm) \param _res The marker content resolution in pixels (this + * is actually \param _margin The marker margin resolution in pixels (The + * actual captured marker image has pixel resolution of _margin+_res+_margin) */ - template - class ALVAR_EXPORT MarkerIteratorImpl : public MarkerIterator { - public: - EIGEN_MAKE_ALIGNED_OPERATOR_NEW - typedef typename std::vector >::const_iterator const_iterator_aligntype; - MarkerIteratorImpl(const_iterator_aligntype i) : _begin(i), _i(i) { - _data = this; - } + MarkerData(double _edge_length = 0, int _res = 0, double _margin = 0) + : Marker(_edge_length, _res, (_margin ? _margin : 2)) + { + } + /** \brief Get ID for recognizing this marker */ + unsigned long GetId() const + { + return data.id; + } + /** \brief Set the ID */ + void SetId(unsigned long _id) + { + data.id = _id; + } + /** \brief Updates the \e marker_content from the image using \e Homography + * Compared to the basic implementation in \e Marker this will also detect the + * marker resolution automatically when the marker resolution is specified to + * be 0. + */ + virtual bool UpdateContent(std::vector& _marker_corners_img, + cv::Mat& gray, Camera* cam, int frame_no = 0); + /** \brief \e DecodeContent should be called after \e UpdateContent to fill \e + * content_type, \e decode_error and \e data + */ + bool DecodeContent(int* orientation); + /** \brief Updates the \e marker_content by "encoding" the given parameters + */ + void SetContent(MarkerContentType content_type, unsigned long id, + const char* str, bool force_strong_hamming = false, + bool verbose = false); +}; - ~MarkerIteratorImpl() {} +/** \brief Iterator type for traversing templated Marker vector without the + * template. + */ +class ALVAR_EXPORT MarkerIterator + : public std::iterator +{ +public: + virtual bool operator==(const MarkerIterator& other) = 0; + virtual bool operator!=(const MarkerIterator& other) = 0; + virtual MarkerIterator& operator++() = 0; + virtual const Marker* operator*() = 0; + virtual const Marker* operator->() = 0; + virtual MarkerIterator& reset() = 0; - // The assignment and relational operators are straightforward - MarkerIteratorImpl& operator=(const MarkerIteratorImpl& other) { - _i = other._i; - return(*this); - } + void* _data; +}; - bool operator==(const MarkerIterator& other) { - MarkerIteratorImpl* pother = (MarkerIteratorImpl*)other._data; - return (_i == pother->_i); - } +/** \brief Iterator implementation for traversing templated Marker vector + * without the template. 
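+ *
+ * Construction sketch (assumes a populated MarkerDetector<MarkerData>
+ * named `detector`; the names are illustrative):
+ * \code
+ *   alvar::MarkerIteratorImpl<alvar::MarkerData> it(detector.markers->begin());
+ *   alvar::MarkerIterator& base = it;  // usable by template-free code
+ * \endcode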
+ * \param T T extends Marker + */ +template +class ALVAR_EXPORT MarkerIteratorImpl : public MarkerIterator +{ +public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + typedef typename std::vector >::const_iterator + const_iterator_aligntype; + MarkerIteratorImpl(const_iterator_aligntype i) : _begin(i), _i(i) + { + _data = this; + } - bool operator!=(const MarkerIterator& other) { - MarkerIteratorImpl* pother = (MarkerIteratorImpl*)other._data; - return (_i != pother->_i); - } + ~MarkerIteratorImpl() + { + } - MarkerIterator& operator++() { - _i++; - return(*this); - } + // The assignment and relational operators are straightforward + MarkerIteratorImpl& operator=(const MarkerIteratorImpl& other) + { + _i = other._i; + return (*this); + } - const Marker* operator*() { - return &(*_i); - } + bool operator==(const MarkerIterator& other) + { + MarkerIteratorImpl* pother = (MarkerIteratorImpl*)other._data; + return (_i == pother->_i); + } - const Marker* operator->() { - return &(*_i); - } + bool operator!=(const MarkerIterator& other) + { + MarkerIteratorImpl* pother = (MarkerIteratorImpl*)other._data; + return (_i != pother->_i); + } - MarkerIterator& reset() { - _i = _begin; - return (*this); - } + MarkerIterator& operator++() + { + _i++; + return (*this); + } - private: - const_iterator_aligntype _begin; - const_iterator_aligntype _i; - }; + const Marker* operator*() + { + return &(*_i); + } + + const Marker* operator->() + { + return &(*_i); + } + + MarkerIterator& reset() + { + _i = _begin; + return (*this); + } + +private: + const_iterator_aligntype _begin; + const_iterator_aligntype _i; +}; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/MarkerDetector.h b/ar_track_alvar/include/ar_track_alvar/MarkerDetector.h index 724f9e6..5f8f816 100644 --- a/ar_track_alvar/include/ar_track_alvar/MarkerDetector.h +++ b/ar_track_alvar/include/ar_track_alvar/MarkerDetector.h @@ -39,143 +39,173 @@ #include "Rotation.h" #include "Line.h" #include +using std::rotate; #include #include #include #include #include -using std::rotate; - -namespace alvar { - +namespace alvar +{ /** - * \brief Templateless version of MarkerDetector. Please use MarkerDetector instead. + * \brief Templateless version of MarkerDetector. Please use MarkerDetector + * instead. 
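+ *
+ * For example (a sketch; the marker type and edge length are
+ * application-specific):
+ * \code
+ *   alvar::MarkerDetector<alvar::MarkerData> detector;  // not MarkerDetectorImpl
+ *   detector.SetMarkerSize(9.0);  // edge length in your working units
+ * \endcode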
*/ -class ALVAR_EXPORT MarkerDetectorImpl { +class ALVAR_EXPORT MarkerDetectorImpl +{ protected: - virtual Marker* new_M(double _edge_length = 0, int _res = 0, double _margin = 0) = 0; - virtual void _markers_clear() = 0; - virtual void _markers_push_back(Marker *mn) = 0; - virtual size_t _markers_size() = 0; - virtual void _track_markers_clear() = 0; - virtual void _track_markers_push_back(Marker *mn) = 0; - virtual size_t _track_markers_size() = 0; - virtual Marker* _track_markers_at(size_t i) = 0; - virtual void _swap_marker_tables() = 0; - - Labeling* labeling; - - std::map map_edge_length; - double edge_length; - int res; - double margin; - bool detect_pose_grayscale; - - MarkerDetectorImpl(); - virtual ~MarkerDetectorImpl(); + virtual Marker* new_M(double _edge_length = 0, int _res = 0, + double _margin = 0) = 0; + virtual void _markers_clear() = 0; + virtual void _markers_push_back(Marker* mn) = 0; + virtual size_t _markers_size() = 0; + virtual void _track_markers_clear() = 0; + virtual void _track_markers_push_back(Marker* mn) = 0; + virtual size_t _track_markers_size() = 0; + virtual Marker* _track_markers_at(size_t i) = 0; + virtual void _swap_marker_tables() = 0; + + Labeling* labeling; + + std::map map_edge_length; + double edge_length; + int res; + double margin; + bool detect_pose_grayscale; + + MarkerDetectorImpl(); + virtual ~MarkerDetectorImpl(); public: - /** \brief Clear the markers that are tracked */ - void TrackMarkersReset(); - - /** \brief Add markers to be tracked - * Sometimes application or e.g. the \e MultiMarker implementation knows - * more about marker locations. Then this method can be used after \e Detect - * to indicate where additional trackable markers could be found. The - * \e DetectAdditional is called for tracking these. - */ - void TrackMarkerAdd(int id, PointDouble corners[4]); - - /** Set the default marker size to be used for all markers unless - * \param _edge_length Length of the marker's edge in whatever units you are using (e.g. cm) - * \param _res The marker content resolution in pixels. By default we use 5x5 markers. If you use 0 with \e MarkerData, the marker resolution is detected automatically. - * \param _margin The marker margin resolution in pixels (The actual captured marker image has pixel resolution of _margin+_res+_margin) - * - * \note The default marker content resolution (_res) of 5 can only detect marker ids from 0 to 255. For larger marker ids, you need to increase the marker content resolution accordingly. - */ - void SetMarkerSize(double _edge_length = 1, int _res = 5, double _margin = 2); - - /** Set marker size for specified marker id. This needs to be called after setting the default marker size. - * \param id The specified marker id - * \param _edge_length Length of the marker's edge in whatever units you are using (e.g. cm) - */ - void SetMarkerSizeForId(unsigned long id, double _edge_length = 1); - - /** Set marker size for specified marker id. This needs to be called after setting the default marker size. - * \param _detect_pose_grayscale Do we detect marker pose using grayscale optimization? - */ - void SetOptions(bool _detect_pose_grayscale=false); - - /** - * \brief \e Detect \e Marker 's from \e image - * - * The coordinates are little tricky. Here is a short summary. - * - * - Image (top-left origin). - * - The marker corners in the image are searched in sub-pixel accuracy in - * counter-clockwise order starting from "lower-left" corner. 
- * - The corresponding marker corners and marker points are in marker coordinates - * (x is to east, y is to north, and z is up from the marker) - * - The marker points are read from inside the margins starting from top-left - * and reading the bits first left-to-right one line at a time. - */ - int Detect(cv::Mat *image, - Camera *cam, - bool track=false, - bool visualize=false, - double max_new_marker_error=0.08, - double max_track_error=0.2, - LabelingMethod labeling_method=CVSEQ, - bool update_pose=true); - - int DetectAdditional(cv::Mat *image, Camera *cam, bool visualize=false, double max_track_error=0.2); + /** \brief Clear the markers that are tracked */ + void TrackMarkersReset(); + + /** \brief Add markers to be tracked + * Sometimes application or e.g. the \e MultiMarker implementation knows + * more about marker locations. Then this method can be used after \e Detect + * to indicate where additional trackable markers could be found. The + * \e DetectAdditional is called for tracking these. + */ + void TrackMarkerAdd(int id, PointDouble corners[4]); + + /** Set the default marker size to be used for all markers unless + * \param _edge_length Length of the marker's edge in whatever units you are + * using (e.g. cm) \param _res The marker content resolution in pixels. By + * default we use 5x5 markers. If you use 0 with \e MarkerData, the marker + * resolution is detected automatically. \param _margin The marker margin + * resolution in pixels (The actual captured marker image has pixel resolution + * of _margin+_res+_margin) + * + * \note The default marker content resolution (_res) of 5 can only detect + * marker ids from 0 to 255. For larger marker ids, you need to increase the + * marker content resolution accordingly. + */ + void SetMarkerSize(double _edge_length = 1, int _res = 5, double _margin = 2); + + /** Set marker size for specified marker id. This needs to be called after + * setting the default marker size. \param id The specified marker id \param + * _edge_length Length of the marker's edge in whatever units you are using + * (e.g. cm) + */ + void SetMarkerSizeForId(unsigned long id, double _edge_length = 1); + + /** Set marker size for specified marker id. This needs to be called after + * setting the default marker size. \param _detect_pose_grayscale Do we detect + * marker pose using grayscale optimization? + */ + void SetOptions(bool _detect_pose_grayscale = false); + + /** + * \brief \e Detect \e Marker 's from \e image + * + * The coordinates are little tricky. Here is a short summary. + * + * - Image (top-left origin). + * - The marker corners in the image are searched in sub-pixel accuracy in + * counter-clockwise order starting from "lower-left" corner. + * - The corresponding marker corners and marker points are in marker + * coordinates (x is to east, y is to north, and z is up from the marker) + * - The marker points are read from inside the margins starting from top-left + * and reading the bits first left-to-right one line at a time. 
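+ *
+ * Typical call sequence (a sketch; `frame` and `cam` are an image and a
+ * calibrated camera supplied by the caller, and the error thresholds shown
+ * are the defaults):
+ * \code
+ *   alvar::MarkerDetector<alvar::MarkerData> detector;
+ *   detector.SetMarkerSize(9.0);
+ *   detector.Detect(frame, &cam, true, false, 0.08, 0.2);
+ *   for (size_t i = 0; i < detector.markers->size(); ++i)
+ *     std::cout << (*detector.markers)[i].GetId() << std::endl;
+ * \endcode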
+ */ + int Detect(cv::Mat& image, Camera* cam, bool track = false, + bool visualize = false, double max_new_marker_error = 0.08, + double max_track_error = 0.2, + LabelingMethod labeling_method = CVSEQ, bool update_pose = true); + + int DetectAdditional(cv::Mat& image, Camera* cam, bool visualize = false, + double max_track_error = 0.2); }; /** * \brief \e MarkerDetector for detecting markers of type \e M * \param M Class that extends \e Marker */ -template +template class ALVAR_EXPORT MarkerDetector : public MarkerDetectorImpl { protected: - Marker* new_M(double _edge_length = 0, int _res = 0, double _margin = 0) { + Marker* new_M(double _edge_length = 0, int _res = 0, double _margin = 0) + { return new M(_edge_length, _res, _margin); } - void _markers_clear() { markers->clear(); } - void _markers_push_back(Marker *mn) { markers->push_back(*((M*)mn)); } - size_t _markers_size() { return markers->size(); } - void _track_markers_clear() { track_markers->clear(); } - void _track_markers_push_back(Marker *mn) { track_markers->push_back(*((M*)mn)); } - size_t _track_markers_size() { return track_markers->size(); } - Marker* _track_markers_at(size_t i) { return &track_markers->at(i); } - - void _swap_marker_tables() { - std::vector > *tmp_markers = markers; - markers = track_markers; - track_markers = tmp_markers; + void _markers_clear() + { + markers->clear(); + } + void _markers_push_back(Marker* mn) + { + markers->push_back(*((M*)mn)); + } + size_t _markers_size() + { + return markers->size(); + } + void _track_markers_clear() + { + track_markers->clear(); + } + void _track_markers_push_back(Marker* mn) + { + track_markers->push_back(*((M*)mn)); + } + size_t _track_markers_size() + { + return track_markers->size(); + } + Marker* _track_markers_at(size_t i) + { + return &track_markers->at(i); } -public: + void _swap_marker_tables() + { + std::vector >* tmp_markers = markers; + markers = track_markers; + track_markers = tmp_markers; + } - std::vector > *markers; - std::vector > *track_markers; +public: + std::vector >* markers; + std::vector >* track_markers; - /** Constructor */ - MarkerDetector() : MarkerDetectorImpl() { + /** Constructor */ + MarkerDetector() : MarkerDetectorImpl() + { markers = new std::vector >; track_markers = new std::vector >; - } + } - /** Destructor */ - ~MarkerDetector() { + /** Destructor */ + ~MarkerDetector() + { delete markers; delete track_markers; - } + } }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/MultiMarker.h b/ar_track_alvar/include/ar_track_alvar/MultiMarker.h index c55848f..cda20ad 100644 --- a/ar_track_alvar/include/ar_track_alvar/MultiMarker.h +++ b/ar_track_alvar/include/ar_track_alvar/MultiMarker.h @@ -39,160 +39,209 @@ #include #include -namespace alvar { - +namespace alvar +{ /** * \brief Base class for using MultiMarker. */ -class ALVAR_EXPORT MultiMarker { - - +class ALVAR_EXPORT MultiMarker +{ private: - - bool SaveXML(const char* fname); - bool SaveText(const char* fname); - bool LoadText(const char* fname); - bool LoadXML(const char* fname); + bool SaveXML(const char* fname); + bool SaveText(const char* fname); + bool LoadText(const char* fname); + bool LoadXML(const char* fname); public: + // The marker information is stored in all three tables using // The marker information is stored in all three tables using + // The marker information is stored in all three tables using + // the indices-order given in constructor. // the indices-order given in constructor. 
+ // the indices-order given in constructor. + // One idea is that the same 'pointcloud' could contain feature // One idea is that the same 'pointcloud' could contain feature - // points after marker-corner-points. This way they would be - // optimized simultaneously with marker corners... - std::map pointcloud; - std::vector marker_indices; // The marker id's to be used in marker field (first being the base) - std::vector marker_status; // 0: not in point cloud, 1: in point cloud, 2: used in GetPose() - std::vector< std::vector > rel_corners; //The coords of the master marker relative to each child marker in marker_indices - - int pointcloud_index(int marker_id, int marker_corner, bool add_if_missing=false); - int get_id_index(int id, bool add_if_missing=false); - - double _GetPose(MarkerIterator &begin, MarkerIterator &end, Camera* cam, Pose& pose, cv::Mat* image); - - int _SetTrackMarkers(MarkerDetectorImpl &marker_detector, Camera* cam, Pose& pose, cv::Mat *image); - int master_id; //The id of the first marker specified in the XML file - - - /** \brief Resets the multi marker. */ - virtual void Reset(); - - /** \brief Saves multi marker configuration to a text file. - * \param fname file name - * \param format FILE_FORMAT_TEXT (default) or FILE_FORMAT_XML (see doc/MultiMarker.xsd). - */ - bool Save(const char* fname, FILE_FORMAT format = FILE_FORMAT_DEFAULT); - - /** \brief Loads multi marker configuration from a text file. - * \param fname file name - * \param format FILE_FORMAT_TEXT (default) or FILE_FORMAT_XML (see doc/MultiMarker.xsd). - */ - bool Load(const char* fname, FILE_FORMAT format = FILE_FORMAT_DEFAULT); - - /** \brief Constructor. - \param indices Vector of marker codes that are included into multi marker. The first element defines origin. - */ - MultiMarker(std::vector& indices); - - /** \brief Default constructor */ - MultiMarker() {} - - /** \brief Calculates the pose of the camera from multi marker. Method uses the true 3D coordinates of - markers to get the initial pose and then optimizes it by minimizing the reprojection error. - - \param markers Vector of markers seen by camera. - \param cam Camera object containing internal calibration etc. - \param pose The resulting pose is stored here. - \param image If != 0 some visualizations are drawn. - */ - template - double GetPose(const std::vector >* markers, Camera* cam, Pose& pose, cv::Mat* image = 0) - { - MarkerIteratorImpl begin(markers->begin()); - MarkerIteratorImpl end(markers->end()); - return _GetPose(begin, end, - cam, pose, image); - } - - /** \brief Calls GetPose to obtain camera pose. - */ - template - double Update(const std::vector >* markers, Camera* cam, Pose& pose, cv::Mat* image = 0) - { - if(markers->size() < 1) return -1.0; - - return GetPose(markers, cam, pose, image); - } - - /** \brief Reserts the 3D point cloud of the markers. - */ - void PointCloudReset(); - - /** \brief Calculates 3D coordinates of marker corners relative to given pose (camera). - - \param edge_length The edge length of the marker. - \param pose Current pose of the camera. - \param corners Resulted 3D corner points are stored here. - */ - void PointCloudCorners3d(double edge_length, Pose &pose, cv::Point3f corners[4]); - - /** \brief Adds marker corners to 3D point cloud of multi marker. - \param marker_id Id of the marker to be added. - \param edge_length Edge length of the marker. - \param pose Current camera pose. 
- */ - void PointCloudAdd(int marker_id, double edge_length, Pose &pose); - - /** \brief Copies the 3D point cloud from other multi marker object. - */ - void PointCloudCopy(const MultiMarker *m); - - /** \brief Returns true if the are not points in the 3D opint cloud. - */ - bool PointCloudIsEmpty() { - return pointcloud.empty(); - } - - /** \brief Return the number of markers added using PointCloudAdd */ - size_t Size() { - return marker_indices.size(); - } - - /** \brief Returns 3D co-ordinates of one corner point of a marker. - * \param marker_id ID of the marker which corner point is returned. - * \param point Number of the corner point to return, from 0 to 3. - * \param x x co-ordinate is returned. - * \param y y co-ordinate is returned. - * \param z z co-ordinate is returned. - */ - void PointCloudGet(int marker_id, int point, - double &x, double &y, double &z); - - /** \brief Returns true if the marker is in the point cloud. - * \param marker_id ID of the marker. - */ - bool IsValidMarker(int marker_id); - - std::vector getIndices(){ - return marker_indices; - } - - int getMasterId(){ - return master_id; - } - - /** \brief Set new markers to be tracked for \e MarkerDetector - * Sometimes the \e MultiMarker implementation knows more about marker - * locations compared to \e MarkerDetector . Then this method can be - * used to reset the internal state of \e MarkerDetector for tracking - * also these markers. - */ - template - int SetTrackMarkers(MarkerDetector &marker_detector, Camera* cam, Pose& pose, cv::Mat *image=0) { + // One idea is that the same 'pointcloud' could contain feature + // points after marker-corner-points. This way they would be + // optimized simultaneously with marker corners... + std::map pointcloud; + std::vector marker_indices; // The marker id's to be used in marker + // field (first being the base) + std::vector marker_status; // 0: not in point cloud, 1: in point cloud, + // 2: used in GetPose() + std::vector > rel_corners; // The coords of the + // master marker relative + // to each child marker + // in marker_indices + + int pointcloud_index(int marker_id, int marker_corner, + bool add_if_missing = false); + int get_id_index(int id, bool add_if_missing = false); + + double _GetPose(MarkerIterator& begin, MarkerIterator& end, Camera* cam, + Pose& pose, cv::Mat& image); + double _GetPose(MarkerIterator& begin, MarkerIterator& end, Camera* cam, + Pose& pose) + { + cv::Mat empty_img; + return _GetPose(begin, end, cam, pose, empty_img); + } + + int _SetTrackMarkers(MarkerDetectorImpl& marker_detector, Camera* cam, + Pose& pose, cv::Mat& image); + int master_id; // The id of the first marker specified in the XML file + + /** \brief Resets the multi marker. */ + virtual void Reset(); + + /** \brief Saves multi marker configuration to a text file. + * \param fname file name + * \param format FILE_FORMAT_TEXT (default) or FILE_FORMAT_XML (see + * doc/MultiMarker.xsd). + */ + bool Save(const char* fname, FILE_FORMAT format = FILE_FORMAT_DEFAULT); + + /** \brief Loads multi marker configuration from a text file. + * \param fname file name + * \param format FILE_FORMAT_TEXT (default) or FILE_FORMAT_XML (see + * doc/MultiMarker.xsd). + */ + bool Load(const char* fname, FILE_FORMAT format = FILE_FORMAT_DEFAULT); + + /** \brief Constructor. + \param indices Vector of marker codes that are included into multi marker. + The first element defines origin. 
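+
+    Construction sketch (the marker ids are illustrative; the constructor
+    takes a non-const reference, so pass an lvalue):
+    \code
+      std::vector<int> ids = {0, 1, 2};  // the first id becomes the origin
+      alvar::MultiMarker multi(ids);
+    \endcode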
+  */
+  MultiMarker(std::vector<int>& indices);
+
+  /** \brief Default constructor */
+  MultiMarker()
+  {
+  }
+
+  /** \brief Calculates the pose of the camera from multi marker. Method uses
+    the true 3D coordinates of markers to get the initial pose and then
+    optimizes it by minimizing the reprojection error.
+
+    \param markers Vector of markers seen by camera.
+    \param cam Camera object containing internal calibration etc.
+    \param pose The resulting pose is stored here.
+    \param image If given, some visualizations are drawn.
+  */
+  template <class M>
+  double GetPose(const std::vector<M, Eigen::aligned_allocator<M> >* markers,
+                 Camera* cam, Pose& pose, cv::Mat& image)
+  {
+    MarkerIteratorImpl<M> begin(markers->begin());
+    MarkerIteratorImpl<M> end(markers->end());
+    return _GetPose(begin, end, cam, pose, image);
+  }
+  template <class M>
+  double GetPose(const std::vector<M, Eigen::aligned_allocator<M> >* markers,
+                 Camera* cam, Pose& pose)
+  {
+    MarkerIteratorImpl<M> begin(markers->begin());
+    MarkerIteratorImpl<M> end(markers->end());
+    return _GetPose(begin, end, cam, pose);
+  }
+
+  /** \brief Calls GetPose to obtain camera pose.
+   */
+  template <class M>
+  double Update(const std::vector<M, Eigen::aligned_allocator<M> >* markers,
+                Camera* cam, Pose& pose, cv::Mat& image)
+  {
+    if (markers->size() < 1)
+      return -1.0;
+
+    return GetPose(markers, cam, pose, image);
+  }
+  template <class M>
+  double Update(const std::vector<M, Eigen::aligned_allocator<M> >* markers,
+                Camera* cam, Pose& pose)
+  {
+    if (markers->size() < 1)
+      return -1.0;
+
+    return GetPose(markers, cam, pose);
+  }
+
+  /** \brief Resets the 3D point cloud of the markers.
+   */
+  void PointCloudReset();
+
+  /** \brief Calculates 3D coordinates of marker corners relative to given pose
+    (camera).
+
+    \param edge_length The edge length of the marker.
+    \param pose Current pose of the camera.
+    \param corners Resulted 3D corner points are stored here.
+  */
+  void PointCloudCorners3d(double edge_length, Pose& pose,
+                           cv::Point3d corners[4]);
+
+  /** \brief Adds marker corners to 3D point cloud of multi marker.
+    \param marker_id Id of the marker to be added.
+    \param edge_length Edge length of the marker.
+    \param pose Current camera pose.
+  */
+  void PointCloudAdd(int marker_id, double edge_length, Pose& pose);
+
+  /** \brief Copies the 3D point cloud from another multi marker object.
+   */
+  void PointCloudCopy(const MultiMarker* m);
+
+  /** \brief Returns true if there are no points in the 3D point cloud.
+   */
+  bool PointCloudIsEmpty()
+  {
+    return pointcloud.empty();
+  }
+
+  /** \brief Returns the number of markers added using PointCloudAdd */
+  size_t Size()
+  {
+    return marker_indices.size();
+  }
+
+  /** \brief Returns 3D co-ordinates of one corner point of a marker.
+   * \param marker_id ID of the marker which corner point is returned.
+   * \param point Number of the corner point to return, from 0 to 3.
+   * \param x x co-ordinate is returned.
+   * \param y y co-ordinate is returned.
+   * \param z z co-ordinate is returned.
+   */
+  void PointCloudGet(int marker_id, int point, double& x, double& y, double& z);
+
+  /** \brief Returns true if the marker is in the point cloud.
+   * \param marker_id ID of the marker.
+   */
+  bool IsValidMarker(int marker_id);
+
+  std::vector<int> getIndices()
+  {
+    return marker_indices;
+  }
+
+  int getMasterId()
+  {
+    return master_id;
+  }
+
+  /** \brief Set new markers to be tracked for \e MarkerDetector
+   * Sometimes the \e MultiMarker implementation knows more about marker
+   * locations compared to \e MarkerDetector. Then this method can be
+   * used to reset the internal state of \e MarkerDetector for tracking
+   * also these markers.
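+   *
+   * Usage sketch (assumes `multi`, `detector`, `cam`, `pose` and `frame`
+   * from a prior successful GetPose round; the names are illustrative):
+   * \code
+   *   multi.SetTrackMarkers(detector, &cam, pose, frame);
+   *   detector.DetectAdditional(frame, &cam);
+   * \endcode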
+   */
+  template <class M>
+  int SetTrackMarkers(MarkerDetector<M>& marker_detector, Camera* cam,
+                      Pose& pose, cv::Mat& image)
+  {
     return _SetTrackMarkers(marker_detector, cam, pose, image);
-    }
+  }
 };

-} // namespace alvar
+}  // namespace alvar

 #endif
diff --git a/ar_track_alvar/include/ar_track_alvar/MultiMarkerBundle.h b/ar_track_alvar/include/ar_track_alvar/MultiMarkerBundle.h
index cf9d126..e6ca73b 100644
--- a/ar_track_alvar/include/ar_track_alvar/MultiMarkerBundle.h
+++ b/ar_track_alvar/include/ar_track_alvar/MultiMarkerBundle.h
@@ -35,69 +35,93 @@
 #include "Optimization.h"
 #include <Eigen/StdVector>

-namespace alvar {
-
+namespace alvar
+{
 /**
- * \brief Multi marker that uses bundle adjustment to refine the 3D positions of the markers (point cloud).
+ * \brief Multi marker that uses bundle adjustment to refine the 3D positions
+ * of the markers (point cloud).
  *
  * This can be initialized by using e.g. MultiMarkerAverage class.
  */
 class ALVAR_EXPORT MultiMarkerBundle : public MultiMarker
 {
 protected:
-    int optimization_keyframes;
-    int optimization_markers;
-    double optimization_error;
-    bool optimizing;
-    std::vector<Pose> camera_poses; // Estimated camera pose for every frame
-    std::map<int, PointDouble> measurements; //
-    int measurements_index(int frame, int marker_id, int marker_corner) {
-        // hop return (int) (frame*marker_indices.size()*4)+(marker_id*4)+marker_corner;
-        return (int) (frame*marker_indices.size()*4)+(get_id_index(marker_id)*4)+marker_corner;
-    }
+  int optimization_keyframes;
+  int optimization_markers;
+  double optimization_error;
+  bool optimizing;
+  std::vector<Pose> camera_poses;  // Estimated camera pose for every frame
+  std::map<int, PointDouble> measurements;  //
+  int measurements_index(int frame, int marker_id, int marker_corner)
+  {
+    // hop return (int)
+    // (frame*marker_indices.size()*4)+(marker_id*4)+marker_corner;
+    return (int)(frame * marker_indices.size() * 4) +
+           (get_id_index(marker_id) * 4) + marker_corner;
+  }

-    void _MeasurementsAdd(MarkerIterator &begin, MarkerIterator &end, const Pose& camera_pose);
+  void _MeasurementsAdd(MarkerIterator& begin, MarkerIterator& end,
+                        const Pose& camera_pose);

 public:
+  /** \brief Constructor.
+    \param indices Vector of marker codes that are included into multi marker.
+    The first element defines origin.
+  */
+  MultiMarkerBundle(std::vector<int>& indices);

-    /** \brief Constructor.
-    \param indices Vector of marker codes that are included into multi marker. The first element defines origin.
-    */
-    MultiMarkerBundle(std::vector<int>& indices);
-
-    ~MultiMarkerBundle();
+  ~MultiMarkerBundle();

-    /** \brief Resets the measurements and camera poses that are stored for bundle adjustment. If something goes from in
-    optimization one will call this and acquire new measurements.
-    */
-    void MeasurementsReset();
+  /** \brief Resets the measurements and camera poses that are stored for
+    bundle adjustment. If something goes wrong in optimization one will call
+    this and acquire new measurements.
+  */
+  void MeasurementsReset();

-    double GetOptimizationError() { return optimization_error; }
-    int GetOptimizationKeyframes() { return optimization_keyframes; }
-    int GetOptimizationMarkers() { return optimization_markers; }
-    bool GetOptimizing() { return optimizing; }
+  double GetOptimizationError()
+  {
+    return optimization_error;
+  }
+  int GetOptimizationKeyframes()
+  {
+    return optimization_keyframes;
+  }
+  int GetOptimizationMarkers()
+  {
+    return optimization_markers;
+  }
+  bool GetOptimizing()
+  {
+    return optimizing;
+  }

-    /** \brief Adds new measurements that are used in bundle adjustment.
-    \param markers Vector of markers detected by MarkerDetector.
-    \param camera_pose Current camera pose.
-    */
-    template <class M>
-    void MeasurementsAdd(const std::vector<M, Eigen::aligned_allocator<M> > *markers, const Pose& camera_pose) {
-        MarkerIteratorImpl<M> begin(markers->begin());
-        MarkerIteratorImpl<M> end(markers->end());
-        _MeasurementsAdd(begin, end,
-                        camera_pose);
-    }
+  /** \brief Adds new measurements that are used in bundle adjustment.
+    \param markers Vector of markers detected by MarkerDetector.
+    \param camera_pose Current camera pose.
+  */
+  template <class M>
+  void
+  MeasurementsAdd(const std::vector<M, Eigen::aligned_allocator<M> >* markers,
+                  const Pose& camera_pose)
+  {
+    MarkerIteratorImpl<M> begin(markers->begin());
+    MarkerIteratorImpl<M> end(markers->end());
+    _MeasurementsAdd(begin, end, camera_pose);
+  }

-    /** \brief Runs the bundle adjustment optimization.
-    \param markers Vector of markers detected by MarkerDetector.
-    \param camera_pose Current camera pose.
-    \param max_iter Maximum number of iteration loops.
-    \param method The method that is applied inside optimization. Try Optimization::LEVENBERGMARQUARDT or Optimization::GAUSSNEWTON or Optmization::TUKEY_LM
-    */ //LEVENBERGMARQUARDT
-    bool Optimize(Camera *_cam, double stop, int max_iter, Optimization::OptimizeMethod method = Optimization::TUKEY_LM); //TUKEY_LM
+  /** \brief Runs the bundle adjustment optimization.
+    \param markers Vector of markers detected by MarkerDetector.
+    \param camera_pose Current camera pose.
+    \param max_iter Maximum number of iteration loops.
+    \param method The method that is applied inside optimization. Try
+    Optimization::LEVENBERGMARQUARDT or Optimization::GAUSSNEWTON or
+    Optimization::TUKEY_LM
+  */
+  bool Optimize(Camera* _cam, double stop, int max_iter,
+                Optimization::OptimizeMethod method = Optimization::TUKEY_LM);
 };

-} // namespace alvar
+}  // namespace alvar

 #endif
diff --git a/ar_track_alvar/include/ar_track_alvar/MultiMarkerFiltered.h b/ar_track_alvar/include/ar_track_alvar/MultiMarkerFiltered.h
index c6b4eac..91e23ed 100644
--- a/ar_track_alvar/include/ar_track_alvar/MultiMarkerFiltered.h
+++ b/ar_track_alvar/include/ar_track_alvar/MultiMarkerFiltered.h
@@ -33,8 +33,8 @@

 #include "MultiMarker.h"

-namespace alvar {
-
+namespace alvar
+{
 /**
  * \brief Multi marker that is constructed by first calculating the marker
  * poses directly relative to base marker and then filtering the results
@@ -45,52 +45,56 @@ namespace alvar {
 class ALVAR_EXPORT MultiMarkerFiltered : public MultiMarker
 {
 protected:
-    static const int filter_buffer_max=15;
-    FilterMedian *pointcloud_filtered;
+  static const int filter_buffer_max = 15;
+  FilterMedian* pointcloud_filtered;

-    double _Update(MarkerIterator &begin, MarkerIterator &end,
-                    Camera* cam, Pose& pose, IplImage* image);
+  double _Update(MarkerIterator& begin, MarkerIterator& end, Camera* cam,
+                 Pose& pose, IplImage* image);

 public:
-    /** \brief Constructor.
-    \param indices Vector of marker codes that are included into multi marker. The first element defines origin.
-    */
-    MultiMarkerFiltered(std::vector<int>& indices);
+  /** \brief Constructor.
+    \param indices Vector of marker codes that are included into multi marker.
+    The first element defines origin.
+  */
+  MultiMarkerFiltered(std::vector<int>& indices);

-    /** \brief Destructor. */
-    ~MultiMarkerFiltered();
+  /** \brief Destructor. */
+  ~MultiMarkerFiltered();

-    /** \brief Updates the 3D point cloud by averaging the calculated results.
-    \param marker_id The id of the marker whose corner positions are updated.
- \param edge_length The edge length of the marker. - \param pose Current camera pose that is used to estimate the marker position. - */ - void PointCloudAverage(int marker_id, double edge_length, Pose &pose); + /** \brief Updates the 3D point cloud by averaging the calculated results. + \param marker_id The id of the marker whose corner positions are updated. + \param edge_length The edge length of the marker. + \param pose Current camera pose that is used to estimate the marker + position. + */ + void PointCloudAverage(int marker_id, double edge_length, Pose& pose); - /** \brief Updates the marker field and camera pose. - \param markers Markers seen by the camera. - \param camera Camera object used to detect markers. - \param pose Current camera pose. This is also updated. - \param image If != 0 some visualization will be drawn. - */ - template - double Update(const std::vector* markers, Camera* cam, Pose& pose, IplImage* image = 0) - { - if(markers->size() < 1) return false; - MarkerIteratorImpl begin(markers->begin()); - MarkerIteratorImpl end(markers->end()); - return _Update(begin, end, - cam, pose, image); - } + /** \brief Updates the marker field and camera pose. + \param markers Markers seen by the camera. + \param camera Camera object used to detect markers. + \param pose Current camera pose. This is also updated. + \param image If != 0 some visualization will be drawn. + */ + template + double Update(const std::vector* markers, Camera* cam, Pose& pose, + IplImage* image = 0) + { + if (markers->size() < 1) + return false; + MarkerIteratorImpl begin(markers->begin()); + MarkerIteratorImpl end(markers->end()); + return _Update(begin, end, cam, pose, image); + } - /** - * \brief Reset the measurements - */ - void MeasurementsReset() { - pointcloud_filtered->reset(); - } + /** + * \brief Reset the measurements + */ + void MeasurementsReset() + { + pointcloud_filtered->reset(); + } }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/MultiMarkerInitializer.h b/ar_track_alvar/include/ar_track_alvar/MultiMarkerInitializer.h index 4ad4190..cd3645e 100644 --- a/ar_track_alvar/include/ar_track_alvar/MultiMarkerInitializer.h +++ b/ar_track_alvar/include/ar_track_alvar/MultiMarkerInitializer.h @@ -34,8 +34,8 @@ #include "MultiMarker.h" #include -namespace alvar { - +namespace alvar +{ /** * \brief Initializes multi marker by estimating their relative positions * from one or more images. @@ -50,79 +50,110 @@ namespace alvar { class ALVAR_EXPORT MultiMarkerInitializer : public MultiMarker { public: - /** - * \brief MarkerMeasurement that holds the maker id. - */ - class MarkerMeasurement : public Marker { - long _id; - public: - MarkerMeasurement() : globalPose(false) {} - bool globalPose; - unsigned long GetId() const { return _id; } - void SetId(unsigned long _id) { this->_id = _id; } - }; + /** + * \brief MarkerMeasurement that holds the maker id. 
+   */
+  class MarkerMeasurement : public Marker
+  {
+    long _id;
+
+  public:
+    MarkerMeasurement() : globalPose(false)
+    {
+    }
+    bool globalPose;
+    unsigned long GetId() const
+    {
+      return _id;
+    }
+    void SetId(unsigned long _id)
+    {
+      this->_id = _id;
+    }
+  };

 protected:
-    std::vector<bool> marker_detected;
-    std::vector<std::vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> > > measurements;
-    typedef std::vector<std::vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> > >::iterator MeasurementIterator;
-    FilterMedian *pointcloud_filtered;
-    int filter_buffer_min;
-
-    bool updateMarkerPoses(std::vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> > &markers, const Pose &pose);
-    void MeasurementsAdd(MarkerIterator &begin, MarkerIterator &end);
+  std::vector<bool> marker_detected;
+  std::vector<std::vector<MarkerMeasurement,
+                          Eigen::aligned_allocator<MarkerMeasurement> > >
+      measurements;
+  typedef std::vector<
+      std::vector<MarkerMeasurement,
+                  Eigen::aligned_allocator<MarkerMeasurement> > >::iterator
+      MeasurementIterator;
+  FilterMedian* pointcloud_filtered;
+  int filter_buffer_min;
+
+  bool updateMarkerPoses(
+      std::vector<MarkerMeasurement,
+                  Eigen::aligned_allocator<MarkerMeasurement> >& markers,
+      const Pose& pose);
+  void MeasurementsAdd(MarkerIterator& begin, MarkerIterator& end);

 public:
-    MultiMarkerInitializer(std::vector<int>& indices, int filter_buffer_min = 4, int filter_buffer_max = 15);
-    ~MultiMarkerInitializer();
-
-    /**
-    * Adds a new measurement for marker field initialization.
-    * Each measurement should contain at least two markers.
-    * It does not matter which markers are visible, especially
-    * the zero marker does not have to be visible in every measurement.
-    * It suffices that there exists a 'path' from the zero marker
-    * to every other marker in the marker field.
-    *
-    * For example:
-    * - first measurement contains marker A and B.
-    * - second measurement containt markers ZERO and A.
-    *
-    * When Initialize is called, the system can first deduce the pose of A
-    * and then the pose of B.
-    */
-    template <class M>
-    void MeasurementsAdd(const std::vector<M, Eigen::aligned_allocator<M> > *markers) {
-        MarkerIteratorImpl<M> begin(markers->begin());
-        MarkerIteratorImpl<M> end(markers->end());
-        MeasurementsAdd(begin, end);
-    }
-
-    /**
-    *
-    */
-    void MeasurementsReset();
-
-    /**
-    * Tries to deduce marker poses from measurements.
-    *
-    * Returns the number of initialized markers.
-    */
-    int Initialize(Camera* cam);
-
-    int getMeasurementCount() { return measurements.size(); }
-
-    const std::vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> >& getMeasurementMarkers(int measurement) {
-        return measurements[measurement];
-    }
-
-    double getMeasurementPose(int measurement, Camera *cam, Pose &pose) {
-        MarkerIteratorImpl<MarkerMeasurement> m_begin(measurements[measurement].begin());
-        MarkerIteratorImpl<MarkerMeasurement> m_end(measurements[measurement].end());
-        return _GetPose(m_begin, m_end, cam, pose, NULL);
-    }
+  MultiMarkerInitializer(std::vector<int>& indices, int filter_buffer_min = 4,
+                         int filter_buffer_max = 15);
+  ~MultiMarkerInitializer();
+
+  /**
+   * Adds a new measurement for marker field initialization.
+   * Each measurement should contain at least two markers.
+   * It does not matter which markers are visible, especially
+   * the zero marker does not have to be visible in every measurement.
+   * It suffices that there exists a 'path' from the zero marker
+   * to every other marker in the marker field.
+   *
+   * For example:
+   * - first measurement contains markers A and B.
+   * - second measurement contains markers ZERO and A.
+   *
+   * When Initialize is called, the system can first deduce the pose of A
+   * and then the pose of B.
+   */
+  template <class M>
+  void
+  MeasurementsAdd(const std::vector<M, Eigen::aligned_allocator<M> >* markers)
+  {
+    MarkerIteratorImpl<M> begin(markers->begin());
+    MarkerIteratorImpl<M> end(markers->end());
+    MeasurementsAdd(begin, end);
+  }
+
+  /**
+   * Resets the stored measurements.
+   */
+  void MeasurementsReset();
+
+  /**
+   * Tries to deduce marker poses from measurements.
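+   *
+   * Sketch of an initialization loop (one MeasurementsAdd call per captured
+   * view; `init`, `detector` and `cam` are assumed from the caller):
+   * \code
+   *   for (int view = 0; view < n_views; ++view)
+   *     init.MeasurementsAdd(detector.markers);  // after each Detect call
+   *   int n_initialized = init.Initialize(&cam);
+   * \endcode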
+ * + * Returns the number of initialized markers. + */ + int Initialize(Camera* cam); + + int getMeasurementCount() + { + return measurements.size(); + } + + const std::vector >& + getMeasurementMarkers(int measurement) + { + return measurements[measurement]; + } + + double getMeasurementPose(int measurement, Camera* cam, Pose& pose) + { + MarkerIteratorImpl m_begin( + measurements[measurement].begin()); + MarkerIteratorImpl m_end( + measurements[measurement].end()); + cv::Mat empty_img; + return _GetPose(m_begin, m_end, cam, pose); + } }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Mutex.h b/ar_track_alvar/include/ar_track_alvar/Mutex.h index ef2437d..483d519 100644 --- a/ar_track_alvar/include/ar_track_alvar/Mutex.h +++ b/ar_track_alvar/include/ar_track_alvar/Mutex.h @@ -32,8 +32,8 @@ #include "Alvar.h" -namespace alvar { - +namespace alvar +{ class MutexPrivate; /** @@ -44,33 +44,33 @@ class MutexPrivate; class ALVAR_EXPORT Mutex { public: - /** - * \brief Constructor. - */ - Mutex(); + /** + * \brief Constructor. + */ + Mutex(); - /** - * \brief Destructor. - */ - ~Mutex(); + /** + * \brief Destructor. + */ + ~Mutex(); - /** - * \brief Locks the mutex. - * - * If the mutex is already locked by another thread, this method will - * block until the other thread unlocks the mutex. - */ - void lock(); + /** + * \brief Locks the mutex. + * + * If the mutex is already locked by another thread, this method will + * block until the other thread unlocks the mutex. + */ + void lock(); - /** - * \brief Unlocks the mutex. - */ - void unlock(); + /** + * \brief Unlocks the mutex. + */ + void unlock(); private: - MutexPrivate *d; + MutexPrivate* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Mutex_private.h b/ar_track_alvar/include/ar_track_alvar/Mutex_private.h index 99bd9bd..a4d76ca 100644 --- a/ar_track_alvar/include/ar_track_alvar/Mutex_private.h +++ b/ar_track_alvar/include/ar_track_alvar/Mutex_private.h @@ -24,21 +24,21 @@ #ifndef MUTEX_PRIVATE_H #define MUTEX_PRIVATE_H -namespace alvar { - +namespace alvar +{ class MutexPrivateData; class MutexPrivate { public: - MutexPrivate(); - ~MutexPrivate(); - void lock(); - void unlock(); + MutexPrivate(); + ~MutexPrivate(); + void lock(); + void unlock(); - MutexPrivateData *d; + MutexPrivateData* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Optimization.h b/ar_track_alvar/include/ar_track_alvar/Optimization.h index 9ed2a19..bd9d0fa 100644 --- a/ar_track_alvar/include/ar_track_alvar/Optimization.h +++ b/ar_track_alvar/include/ar_track_alvar/Optimization.h @@ -25,116 +25,117 @@ #define OPTIMIZATION_H #include "Alvar.h" -#include -#include -#include +#include //#include - /** * \file Optimization.h * * \brief This file implements several optimization algorithms. */ -namespace alvar { - -/** - * \brief Non-linear optimization routines. There are three methods implemented that include Gauss-Newton, Levenberg-Marquardt and Tukey m-estimator. - * - */ -class ALVAR_EXPORT Optimization +namespace alvar +{ +/** + * \brief Non-linear optimization routines. There are three methods implemented + * that include Gauss-Newton, Levenberg-Marquardt and Tukey m-estimator. 
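+ *
+ * Minimal usage sketch (the projection callback is a toy stand-in, not part
+ * of ALVAR; the matrix sizes are illustrative):
+ * \code
+ *   void Project(cv::Mat& state, cv::Mat& projection, void* param)
+ *   {
+ *     state.copyTo(projection);  // toy model: measurement equals state
+ *   }
+ *   // ...
+ *   cv::Mat params(3, 1, CV_64F), meas(3, 1, CV_64F);  // filled by caller
+ *   alvar::Optimization opt(3, 3);
+ *   opt.Optimize(params, meas, 1e-6, 100, Project);
+ * \endcode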
+ * + */ +class ALVAR_EXPORT Optimization { - private: - - void *estimate_param; - CvMat *J; - CvMat *JtJ; - CvMat *W; - CvMat *diag; - CvMat *tmp; - CvMat *err; - CvMat *delta; - CvMat *x_plus; - CvMat *x_minus; - CvMat *x_tmp1; - CvMat *x_tmp2; - CvMat *tmp_par; - - double CalcTukeyWeight(double residual, double c); - double CalcTukeyWeightSimple(double residual, double c); - - double lambda; + void* estimate_param; + cv::Mat J; + cv::Mat JtJ; + cv::Mat W; + cv::Mat diag; + cv::Mat tmp; + cv::Mat err; + cv::Mat delta; + cv::Mat x_plus; + cv::Mat x_minus; + cv::Mat x_tmp1; + cv::Mat x_tmp2; + cv::Mat tmp_par; + + double CalcTukeyWeight(double residual, double c); + double CalcTukeyWeightSimple(double residual, double c); + + double lambda; public: - - /** - * \brief Selection between the algorithm used in optimization. Following should be noticed: - * \li GAUSSNEWTON - */ - enum OptimizeMethod - { - GAUSSNEWTON, - LEVENBERGMARQUARDT, - TUKEY_LM - }; - - /** - * \brief Constructor. - * \param n_params Number of parameters to be optimized. - * \param n_meas Number of measurements that are observed. - */ - Optimization(int n_params, int n_meas); - ~Optimization(); - - /** - * \brief Returns the current residual vector. - * \return Pointer to the residual vector. - */ - CvMat *GetErr() { return err; } - - /** - * \brief Pointer to the function that projects the state of the system to the measurements. - * \param state System parameters, e.g. camera parameterization in optical tracking. - * \param projection The system state projection is stored here. E.g image measurements in optical tracking. - * \param param Additional parameters to the function. E.g. some constant parameters that are not optimized. - */ - typedef void (*EstimateCallback)(CvMat* state, CvMat *projection, void *param); - - /** - * \brief Numerically differentiates and calculates the Jacobian around x. - * \param x The set of parameters around which the Jacobian is evaluated. - * \param J Resulting Jacobian matrix is stored here. - * \param Estimate The function to be differentiated. - */ - void CalcJacobian(CvMat* x, CvMat* J, EstimateCallback Estimate); - - /** - * \brief Runs the optimization loop with selected parameters. - * \param parameters Vector of parameters to be optimized. Initial values should be set. - * \param measurements Vector of measurements that are observed. - * \param stop Optimization loop ends as the \e stop limit is reached. Criteria is calculated as - * \param max_iter Maximum number of iteration loops that are evaluated if \e stop is not reached. - * \param Estimate Pointer to the function that maps the state to the measurements. See \e EstimateCallback. - * \param method One of the three possible optimization methods. - * \param parameters_mask Vector that defines the parameters that are optimized. If vector element is 0, corresponding parameter is not altered. - * \param J_mat Jacobian matrix. If not given, numerical differentation is used. - * \param weights Weight vector that can be submitted to give different weights to different measurements. Currently works only with OptimizeMethod::TUKEY_LM. - */ - double Optimize(CvMat* parameters, - CvMat* measurements, - double stop, - int max_iter, - EstimateCallback Estimate, - void *param = 0, - OptimizeMethod method = LEVENBERGMARQUARDT, - CvMat* parameters_mask = 0, - CvMat* J_mat = 0, - CvMat* weights = 0); - + /** + * \brief Selection between the algorithm used in optimization. 
The following
+   * should be noticed:
+   * \li GAUSSNEWTON
+   */
+  enum OptimizeMethod
+  {
+    GAUSSNEWTON,
+    LEVENBERGMARQUARDT,
+    TUKEY_LM
+  };
+
+  /**
+   * \brief Constructor.
+   * \param n_params Number of parameters to be optimized.
+   * \param n_meas Number of measurements that are observed.
+   */
+  Optimization(int n_params, int n_meas);
+  ~Optimization();
+
+  /**
+   * \brief Returns the current residual vector.
+   * \return Reference to the residual vector.
+   */
+  cv::Mat& GetErr()
+  {
+    return err;
+  }
+
+  /**
+   * \brief Pointer to the function that projects the state of the system to
+   * the measurements.
+   * \param state System parameters, e.g. camera parameterization in optical
+   * tracking.
+   * \param projection The system state projection is stored here. E.g. image
+   * measurements in optical tracking.
+   * \param param Additional parameters to the function. E.g. some constant
+   * parameters that are not optimized.
+   */
+  typedef void (*EstimateCallback)(cv::Mat& state, cv::Mat& projection,
+                                   void* param);
+
+  /**
+   * \brief Numerically differentiates and calculates the Jacobian around x.
+   * \param x The set of parameters around which the Jacobian is evaluated.
+   * \param J Resulting Jacobian matrix is stored here.
+   * \param Estimate The function to be differentiated.
+   */
+  void CalcJacobian(cv::Mat& x, cv::Mat& J, EstimateCallback Estimate);
+
+  /**
+   * \brief Runs the optimization loop with selected parameters.
+   * \param parameters Vector of parameters to be optimized. Initial values
+   * should be set.
+   * \param measurements Vector of measurements that are observed.
+   * \param stop Optimization loop ends as the \e stop limit is reached.
+   * Criteria is calculated as
+   * \param max_iter Maximum number of iteration loops that are evaluated if
+   * \e stop is not reached.
+   * \param Estimate Pointer to the function that maps the state to the
+   * measurements. See \e EstimateCallback.
+   * \param method One of the three possible optimization methods.
+   * \param parameters_mask Vector that defines the parameters that are
+   * optimized. If vector element is 0, corresponding parameter is not altered.
+   * \param J_mat Jacobian matrix. If not given, numerical differentiation is
+   * used.
+   * \param weights Weight vector that can be submitted to give different
+   * weights to different measurements. Currently works only with
+   * OptimizeMethod::TUKEY_LM.
+   */
+  double Optimize(cv::Mat& parameters, cv::Mat& measurements, double stop,
+                  int max_iter, EstimateCallback Estimate, void* param = 0,
+                  OptimizeMethod method = LEVENBERGMARQUARDT,
+                  const cv::Mat& parameters_mask = cv::Mat(),
+                  const cv::Mat& J_mat = cv::Mat(),
+                  const cv::Mat& weights = cv::Mat());
 };

-} // namespace alvar
+}  // namespace alvar

 #endif
diff --git a/ar_track_alvar/include/ar_track_alvar/Platform.h b/ar_track_alvar/include/ar_track_alvar/Platform.h
index a28701c..063af8e 100644
--- a/ar_track_alvar/include/ar_track_alvar/Platform.h
+++ b/ar_track_alvar/include/ar_track_alvar/Platform.h
@@ -37,8 +37,8 @@
 #include "Timer.h"
 #include "Uncopyable.h"

-namespace alvar {
-
+namespace alvar
+{
 /**
  * \brief Error reporting function inspired by error_at_line() on Linux.
  *
@@ -47,22 +47,22 @@ namespace alvar {
  * the corresponding error message from strerror(). If status is not zero, it
  * exits the process.
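+ *
+ * Call sketch (status 0 reports without exiting; `path` and the message are
+ * illustrative):
+ * \code
+ *   errorAtLine(0, errno, __FILE__, __LINE__, "failed to open %s", path);
+ * \endcode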
 */
-void ALVAR_EXPORT errorAtLine(int status, int error, const char *filename,
-                              unsigned int line, const char *format, ...);
+void ALVAR_EXPORT errorAtLine(int status, int error, const char* filename,
+                              unsigned int line, const char* format, ...);

 /**
  * \brief Sleep for a specified amount of milliseconds.
  */
 void ALVAR_EXPORT sleep(unsigned long milliseconds);

-} // namespace alvar
+}  // namespace alvar

 #ifdef min
-  #undef min
+#undef min
 #endif

 #ifdef max
-  #undef max
+#undef max
 #endif

 #endif
diff --git a/ar_track_alvar/include/ar_track_alvar/Plugin.h b/ar_track_alvar/include/ar_track_alvar/Plugin.h
index 9d033bb..de625ce 100644
--- a/ar_track_alvar/include/ar_track_alvar/Plugin.h
+++ b/ar_track_alvar/include/ar_track_alvar/Plugin.h
@@ -34,64 +34,67 @@

 #include <string>

-namespace alvar {
-
+namespace alvar
+{
 class PluginPrivate;

 /**
  * \brief Plugin for loading dynamic libraries.
  *
- * Plugin class for loading dynamic libraires. The library is loaded during construction and
- * unloaded during destruction.
+ * Plugin class for loading dynamic libraries. The library is loaded during
+ * construction and unloaded during destruction.
  */
 class Plugin
 {
 public:
-    /**
-    * \brief Constructor.
-    *
-    * Constructing a Plugin object will attempt to load the plugin dynamic library.
-    *
-    * \param filename The filename of the dynamic library to load.
-    * \exception AlvarException An exeption is thrown if the library can't be loaded.
-    */
-    Plugin(const std::string filename);
+  /**
+   * \brief Constructor.
+   *
+   * Constructing a Plugin object will attempt to load the plugin dynamic
+   * library.
+   *
+   * \param filename The filename of the dynamic library to load.
+   * \exception AlvarException An exception is thrown if the library can't be
+   * loaded.
+   */
+  Plugin(const std::string filename);

-    /**
-    * \brief Copy constructor.
-    *
-    * \param plugin The Plugin to copy.
-    */
-    Plugin(const Plugin &plugin);
+  /**
+   * \brief Copy constructor.
+   *
+   * \param plugin The Plugin to copy.
+   */
+  Plugin(const Plugin& plugin);

-    /**
-    * \brief Assignment operator.
-    *
-    * \param plugin The Plugin to copy.
-    */
-    Plugin &operator=(const Plugin &plugin);
+  /**
+   * \brief Assignment operator.
+   *
+   * \param plugin The Plugin to copy.
+   */
+  Plugin& operator=(const Plugin& plugin);

-    /**
-    * \brief Destructor.
-    */
-    ~Plugin();
+  /**
+   * \brief Destructor.
+   */
+  ~Plugin();

-    /**
-    * \brief Resolves the address of a symbol.
-    *
-    * The symbol must be exported from the library as a C function.
-    *
-    * \param symbol The signature of the symbol.
-    * \return The address of the symbol.
-    * \exception AlvarException An exception is thrown if the symbol is not found.
-    */
-    void *resolve(const char *symbol);
+  /**
+   * \brief Resolves the address of a symbol.
+   *
+   * The symbol must be exported from the library as a C function.
+   *
+   * \param symbol The signature of the symbol.
+   * \return The address of the symbol.
+   * \exception AlvarException An exception is thrown if the symbol is not
+   * found.
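+   *
+   * Usage sketch (the library and symbol names are hypothetical):
+   * \code
+   *   alvar::Plugin plugin("libmyplugin.so");
+   *   typedef void (*InitFunc)();
+   *   InitFunc init = (InitFunc)plugin.resolve("my_plugin_init");
+   *   init();
+   * \endcode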
+ */ + void* resolve(const char* symbol); private: - PluginPrivate *d; - int *mReferenceCount; + PluginPrivate* d; + int* mReferenceCount; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Plugin_private.h b/ar_track_alvar/include/ar_track_alvar/Plugin_private.h index 6f4f592..d81eb9b 100644 --- a/ar_track_alvar/include/ar_track_alvar/Plugin_private.h +++ b/ar_track_alvar/include/ar_track_alvar/Plugin_private.h @@ -26,22 +26,22 @@ #include -namespace alvar { - +namespace alvar +{ class PluginPrivateData; class PluginPrivate { public: - PluginPrivate(); - ~PluginPrivate(); - void load(const std::string filename); - void unload(); - void *resolve(const char *symbol); + PluginPrivate(); + ~PluginPrivate(); + void load(const std::string filename); + void unload(); + void* resolve(const char* symbol); - PluginPrivateData *d; + PluginPrivateData* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Pose.h b/ar_track_alvar/include/ar_track_alvar/Pose.h index ec51455..51d69de 100644 --- a/ar_track_alvar/include/ar_track_alvar/Pose.h +++ b/ar_track_alvar/include/ar_track_alvar/Pose.h @@ -26,7 +26,6 @@ #include "Alvar.h" #include "Rotation.h" -#include "opencv2/core/types_c.h" /** * \file Pose.h @@ -34,91 +33,96 @@ * \brief This file implements a pose. */ -namespace alvar { - +namespace alvar +{ /** * \brief \e Pose representation derived from the \e Rotation class * - * The rotation part of the transformation is handled by \e Rotation . + * The rotation part of the transformation is handled by \e Rotation . * The translation part is stored internally using homogeneous 4-vector. * * Internally in ALVAR we assume coordinate system where - * 'x' is right, 'y' is down, and 'z' is forward. However - * the \e SetMatrixGL and \e GetMatrixGL change the pose + * 'x' is right, 'y' is down, and 'z' is forward. However + * the \e SetMatrixGL and \e GetMatrixGL change the pose * to support coordinates commonly used in OpenGL: * 'x' is right, 'y' is up, and 'z' is backward. */ -class ALVAR_EXPORT Pose : public Rotation { +class ALVAR_EXPORT Pose : public Rotation +{ protected: - // Note, although we are using homogeneous coordinates x, y, z, w -- w is now mostly ignored + // Note, although we are using homogeneous coordinates x, y, z, w -- w is now + // mostly ignored public: - double translation[4]; - CvMat translation_mat; + double translation[4]; + cv::Mat translation_mat; - /** \e Output for debugging purposes */ - void Output() const; - /** \e Constructor */ - Pose(); - /** \e Constructor using the given translation and rotation elements - * \param tra Column vector containing three translation elements - * \param rot Handled using the \e Rotation class - * \param t Handled using the \e Rotation class - */ - Pose(CvMat *tra, CvMat *rot, RotationType t); - /** \e Constructor with 3x3, 3x4 or 4x4 matrix representation - * \param mat A 3x3 rotation matrix or 3x4 / 4x4 transformation matrix - */ - Pose(CvMat *mat); - /** \e Copy constructor */ - Pose(const Pose& p); - /** \e Reset the pose */ - void Reset(); - /** Set the transformation from the given matrix \e mat - * \param mat A 3x3 rotation matrix or 3x4 / 4x4 transformation matrix - */ - void SetMatrix(const CvMat *mat); - /** \brief Set the \e Pose using OpenGL's transposed format. 
- * Note, that by default this also mirrors both the y- and z-axis (see \e Camera and \e Pose for more information) - * \param gl OpenGL 4x4 transformation matrix elements in column-order - */ - void SetMatrixGL(double gl[16], bool mirror=true); - /** Get the transformation into the given matrix \e mat - * \param mat A 3x3 rotation matrix or 3x4 / 4x4 transformation matrix - */ - void GetMatrix(CvMat *mat) const; - /** \brief Get the transformation matrix representation of the \e Pose using OpenGL's transposed format. - * Note, that by default this also mirrors both the y- and z-axis (see \e Camera and \e Pose for more information) - * \param gl OpenGL 4x4 transformation matrix elements in column-order - */ - void GetMatrixGL(double gl[16], bool mirror=true); - /** \e Transpose the transformation */ - void Transpose(); - /** Invert the pose */ - void Invert(); - /** \e Mirror the \e Pose - * \param x If true mirror the x-coordinates - * \param y If true mirror the y-coordinates - * \param z If true mirror the z-coordinates - */ - void Mirror(bool x, bool y, bool z); - /** Set the translation part for the \e Pose - * \param tra Column vector containing three translation elements - */ - void SetTranslation(const CvMat *tra); - /** Set the translation part for the \e Pose - * \param tra Array containing three translation elements - */ - void SetTranslation(const double *tra); - /** Set the translation part for the \e Pose */ - void SetTranslation(const double x, const double y, const double z); - /** Get the translation part from the \e Pose - * \param tra Column vector where the three translation elements are filled in - */ - void GetTranslation(CvMat *tra) const; - /** Assignment operator for copying \e Pose class */ - Pose& operator = (const Pose& p); + /** \e Output for debugging purposes */ + void Output() const; + /** \e Constructor */ + Pose(); + /** \e Constructor using the given translation and rotation elements + * \param tra Column vector containing three translation elements + * \param rot Handled using the \e Rotation class + * \param t Handled using the \e Rotation class + */ + Pose(const cv::Mat& tra, const cv::Mat& rot, RotationType t); + /** \e Constructor with 3x3, 3x4 or 4x4 matrix representation + * \param mat A 3x3 rotation matrix or 3x4 / 4x4 transformation matrix + */ + Pose(const cv::Mat& mat); + /** \e Copy constructor */ + Pose(const Pose& p); + /** \e Reset the pose */ + void Reset(); + /** Set the transformation from the given matrix \e mat + * \param mat A 3x3 rotation matrix or 3x4 / 4x4 transformation matrix + */ + void SetMatrix(const cv::Mat& mat); + /** \brief Set the \e Pose using OpenGL's transposed format. + * Note, that by default this also mirrors both the y- and z-axis (see \e + * Camera and \e Pose for more information) \param gl OpenGL 4x4 + * transformation matrix elements in column-order + */ + void SetMatrixGL(double gl[16], bool mirror = true); + /** Get the transformation into the given matrix \e mat + * \param mat A 3x3 rotation matrix or 3x4 / 4x4 transformation matrix + */ + void GetMatrix(cv::Mat& mat) const; + /** \brief Get the transformation matrix representation of the \e Pose using + * OpenGL's transposed format. 
Note, that by default this also mirrors both + * the y- and z-axis (see \e Camera and \e Pose for more information) \param + * gl OpenGL 4x4 transformation matrix elements in column-order + */ + void GetMatrixGL(double gl[16], bool mirror = true); + /** \e Transpose the transformation */ + void Transpose(); + /** Invert the pose */ + void Invert(); + /** \e Mirror the \e Pose + * \param x If true mirror the x-coordinates + * \param y If true mirror the y-coordinates + * \param z If true mirror the z-coordinates + */ + void Mirror(bool x, bool y, bool z); + /** Set the translation part for the \e Pose + * \param tra Column vector containing three translation elements + */ + void SetTranslation(const cv::Mat& tra); + /** Set the translation part for the \e Pose + * \param tra Array containing three translation elements + */ + void SetTranslation(const double* tra); + /** Set the translation part for the \e Pose */ + void SetTranslation(const double x, const double y, const double z); + /** Get the translation part from the \e Pose + * \param tra Column vector where the three translation elements are filled + * in + */ + void GetTranslation(cv::Mat& tra) const; + /** Assignment operator for copying \e Pose class */ + Pose& operator=(const Pose& p); }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Ransac.h b/ar_track_alvar/include/ar_track_alvar/Ransac.h index f43ae3d..c6f1d6b 100644 --- a/ar_track_alvar/include/ar_track_alvar/Ransac.h +++ b/ar_track_alvar/include/ar_track_alvar/Ransac.h @@ -33,14 +33,14 @@ * \brief This file implements a generic RANSAC algorithm. */ -namespace alvar { - +namespace alvar +{ /** * \brief Internal implementation of RANSAC. Please use Ransac or IndexRansac. */ -class ALVAR_EXPORT RansacImpl { - - protected: +class ALVAR_EXPORT RansacImpl +{ +protected: void** samples; void* hypothesis; int min_params; @@ -48,40 +48,39 @@ class ALVAR_EXPORT RansacImpl { int sizeof_param; int sizeof_model; - RansacImpl(int min_params, int max_params, - int sizeof_param, int sizeof_model); + RansacImpl(int min_params, int max_params, int sizeof_param, + int sizeof_model); virtual ~RansacImpl(); - int _estimate(void* params, int param_c, - int support_limit, int max_rounds, - void* model); + int _estimate(void* params, int param_c, int support_limit, int max_rounds, + void* model); - int _refine(void* params, int param_c, - int support_limit, int max_rounds, - void* model, char *inlier_mask = NULL); + int _refine(void* params, int param_c, int support_limit, int max_rounds, + void* model, char* inlier_mask = NULL); - virtual void _doEstimate(void** params, int param_c, void* model) {}; - virtual bool _doSupports(void* param, void* model) { return false; }; + virtual void _doEstimate(void** params, int param_c, void* model){}; + virtual bool _doSupports(void* param, void* model) + { + return false; + }; /** IndexRansac version */ - int *indices; - - RansacImpl(int min_params, int max_params, - int sizeof_model); + int* indices; - int _estimate(int param_c, - int support_limit, int max_rounds, - void* model); + RansacImpl(int min_params, int max_params, int sizeof_model); - int _refine(int param_c, - int support_limit, int max_rounds, - void* model, char *inlier_mask = NULL); + int _estimate(int param_c, int support_limit, int max_rounds, void* model); - virtual void _doEstimate(int* params, int param_c, void* model) {}; - virtual bool _doSupports(int param, void* model) { return false; }; + int _refine(int param_c, int 
support_limit, int max_rounds, void* model,
+              char* inlier_mask = NULL);
 
-  virtual void _doEstimate(int* params, int param_c, void* model) {};
-  virtual bool _doSupports(int param, void* model) { return false; };
+  virtual void _doEstimate(int* params, int param_c, void* model){};
+  virtual bool _doSupports(int param, void* model)
+  {
+    return false;
+  };
 
- public:
+public:
   /** \brief How many rounds are needed for the Ransac to work.
    *
    * Computes the required amount of rounds from the estimated
    * success propability and inlier percentage. The result can be used as
    * max_rounds when calling the estimate method.
    */
   int estimateRequiredRounds(float success_propability,
-                            float inlier_percentage);
-
+                             float inlier_percentage);
 };
 
- /**
-  * \brief Implementation of a general RANdom SAmple Consensus algorithm.
+/**
+ * \brief Implementation of a general RANdom SAmple Consensus algorithm.
+ *
+ * This implementation can be used to estimate model from a set of input
+ * data. The user needs to provide support methods to compute the best
+ * model given a set of input data and to classify input data into inliers
+ * and outliers.
+ *
+ * For more information see "Martin A. Fischler and Robert C. Bolles:
+ * Random Sample Consensus: a paradigm for model fitting with applications
+ * to image analysis and automated cartography. Comm of the ACM 24: 381-395"
+ * (http://portal.acm.org/citation.cfm?doid=358669.358692).
+ *
+ * MODEL is the estimated model, for example endpoints of a line for
+ * line fitting.
+ *
+ * PARAMETER is the input for model estimation, for example 2D point
+ * for line fitting.
+ *
+ * MODEL must support an assignment operator.
+ *
+ * The user needs to extend this class and provide two methods:
+ * \code
+ * void doEstimate(PARAMETER** params, int param_c, MODEL* model);
+ * bool doSupports(PARAMETER* param, MODEL* model);
+ * \endcode
+ *
+ * Example: Fitting points to a line:
+ *
+ * \code
+ * typedef struct Point { double x, y; };
+ * typedef struct Line { Point p1, p2; };
+ *
+ * class MyLineFittingRansac : public Ransac<Line, Point> {
+ *   // Line fitting needs at least 2 parameters and here we want
+ *   // to support at most 16 parameters.
+ *   MyLineFittingRansac() : Ransac(2, 16) {}
+ *
+ *   void doEstimate(Point **points, int points_c, Line *line) {
+ *     if (points_c == 2) { *line = Line(*points[0], *points[1]); }
+ *     else {
+ *       // compute best line fitting up to 16 points.
+ *     }
+ *   }
+ *
+ *   bool doSupports(Point *point, Line *line) {
+ *     double distance = ...; // compute point distance to line.
+ *     return distance < POINT_DISTANCE_LIMIT;
+ *   }
+ * };
+ *
+ * Point input[N_POINTS] = { .. };
+ * Line line;
+ * MyLineFittingRansac ransac;
+ *
+ * // assuming 60% of inliers, run RANSAC until the best model is found
+ * // with 99% probability.
+ * int max_rounds = ransac.estimateRequiredRounds(0.99, 0.6);
+ * int number_of_inliers = ransac.estimate(input, N_POINTS, N_POINTS,
+ *                                         max_rounds, &line);
+ *
+ * // let's make the estimate even better.
+ * if (number_of_inliers > 0 && number_of_inliers < N_POINTS)
+ *   number_of_inliers = ransac.refine(input, N_POINTS, N_POINTS, 10, &line);
+ *
+ * // you should keep track of the real percentage of inliers to determine
+ * // the required number of RANSAC rounds.
+ * double inlier_percentage = (double)number_of_inliers / (double)N_POINTS;
+ *
+ * \endcode
+ */
+template <class MODEL, class PARAMETER>
+class Ransac : public RansacImpl
+{
+protected:
+  /** \brief Creates a model estimate from a set of parameters.
    *
-   * This implementation can be used to estimate model from a set of input
-   * data.
The user needs to provide support methods to compute the best
-   * model given a set of input data and to classify input data into inliers
-   * and outliers.
+   * The user must implement this method to compute model parameters
+   * from the input data.
    *
-   * For more information see "Martin A Fischler and Robrt C. Bolles:
-   * Random Sample Consensus: a paradigm for model fitting with applications
-   * to image analysis and automated cartography. Comm of the ACM 24: 381-395"
-   * (http://portal.acm.org/citation.cfm?doid=358669.358692).
-   *
-   * MODEL is the estimated model, for example endpoints of a line for
-   * line fitting.
+   * \param params An array of pointers to sampled parameters (input data).
+   * \param param_c The number of parameter pointers in the params array.
+   * \param model Pointer to the model where to store the estimate.
+   */
+  virtual void doEstimate(PARAMETER** params, int param_c, MODEL* model) = 0;
+
+  /** \brief Computes how well a parameter supports a model.
    *
-   * PARAMETER is the input for model estimation, for example 2D point
-   * for line fitting.
+   * This method is used by the RANSAC algorithm to count how many
+   * parameters support the estimated model (inliers). Although this
+   * is case specific, usually parameter supports the model when the distance
+   * from model prediction is not too far away from the parameter.
    *
-   * MODEL must support an assigment operator.
+   * \param param Pointer to the parameter to check.
+   * \param model Pointer to the model to check the parameter against.
+   * \return True when the parameter supports the model.
+   */
+  virtual bool doSupports(PARAMETER* param, MODEL* model) = 0;
+
+  /**
+   * Wrapper for templated parameters.
+   */
+  void _doEstimate(void** params, int param_c, void* model)
+  {
+    doEstimate((PARAMETER**)params, param_c, (MODEL*)model);
+  }
+
+  /**
+   * Wrapper for templated parameters.
+   */
+  bool _doSupports(void* param, void* model)
+  {
+    return doSupports((PARAMETER*)param, (MODEL*)model);
+  }
+
+public:
+  /** \brief Initialize the algorithm.
    *
-   * The user needs to extend this class and provide two methods:
-   * \code
-   * void doEstimate(PARAMETER** params, int param_c, MODEL* model);
-   * bool doSupports(PARAMETER* param, MODEL* model);
-   * \endcode
+   * Uses at least min_params and at most max_params number of input data
+   * elements for model estimation.
    *
-   * Example: Fitting points to a line:
+   * Must be: max_params >= min_params
    *
-   * \code
-   * typedef struct Point { double x, double y; };
-   * typedef struct Line { Point p1, p2; };
+   * \param min_params is the minimum number of parameters needed to create
+   * a model.
+   * \param max_params is the maximum number of parameters to use in refining
+   * the model.
+   */
+  Ransac(int min_params, int max_params)
+    : RansacImpl(min_params, max_params, sizeof(PARAMETER), sizeof(MODEL))
+  {
+  }
+
+  virtual ~Ransac()
+  {
+  }
+
+  /** \brief Estimates a model from input data parameters.
    *
-   * class MyLineFittingRansac : public Ransac {
-   *   // Line fitting needs at least 2 parameters and here we want
-   *   // to support at most 16 parameters.
-   *   MyLineFittingRansac() : Ransac(2, 16) {}
+   * Randomly samples min_params number of input data elements from params array
+   * and chooses the model that has the largest set of supporting parameters
+   * (inliers) in the params array.
    *
-   *   void doEstimate(Point **points, int points_c, Line *line) {
-   *     if (points_c == 2) { return Line(*points[0], *points[1]); }
-   *     else { // compute best line fitting up to 16 points.
}
-   * }
+   * Note that this method always uses min_params number of parameters, that is,
+   * \e doEstimate method can be implemented to support only the minimum number
+   * of parameters unless \e refine method is used.
    *
-   *   bool doSupports(Point *point, Line *line) {
-   *     double distance = // compute point distance to line.
-   *     return distance < POINT_DISTANCE_LIMIT;
-   *   }
-   * };
+   * \param params Parameters that the model is estimated from (input data).
+   * \param param_c Number of elements in the params array.
+   * \param support_limit The search is stopped if a model receives
+   * more support than this limit.
+   * \param max_rounds How many different samples are tried before
+   * stopping the search.
+   * \param model The estimated model is stored here.
+   * \return the number of parameters supporting the model, or 0
+   * if a suitable model could not be built at all.
+   */
+  int estimate(PARAMETER* params, int param_c, int support_limit,
+               int max_rounds, MODEL* model)
+  {
+    return _estimate(params, param_c, support_limit, max_rounds, model);
+  }
+
+  /** \brief Iteratively makes the estimated model better.
    *
-   * Point input[N_POINTS] = { .. };
-   * Line line;
-   * MyLineFittingRansac ransac;
+   * Starting with the estimated model, computes the model from all
+   * inlier parameters and iterates until no new parameters support
+   * the model.
    *
-   * // assuming 60% of inliers, run RANSAC until the best model is found with 99% propability.
-   * int max_rounds = ransac.estimateRequiredRounds(0.99, 0.6);
-   * int number_of_inliers = ransac.estimate(input, N_POINTS, N_POINTS, max_rounds, &line);
+   * Note that this method uses up to max_params number of parameters,
+   * that is, \e doEstimate method must be implemented in such a way
+   * that it can estimate a model from a variable number of parameters.
    *
-   * // lets make the estimate even better.
-   * if (number_of_inliers > 0 && number_of_inliers < N_POINTS)
-   *   number_of_inliers = ransac.refine(input, N_POINTS, N_POINTS, 10, &line);
+   * \param params Parameters that the model is estimated from.
+   * \param param_c Number of parameters.
+   * \param support_limit The search is stopped if a model receives
+   * more support than this limit.
+   * \param max_rounds How many iterations of the refinement are run.
+   * \param model The estimated model that is refined.
+   * \param inlier_mask Byte array where 1 is stored for inliers and 0 for
+   * outliers.
+   * \return the number of parameters supporting the model.
+   */
+  int refine(PARAMETER* params, int param_c, int support_limit, int max_rounds,
+             MODEL* model, char* inlier_mask = NULL)
+  {
+    return _refine(params, param_c, support_limit, max_rounds, model,
+                   inlier_mask);
+  }
+};
+
+/**
+ * \brief Implementation of a general RANdom SAmple Consensus algorithm
+ * with implicit parameters.
+ *
+ * These parameters are accessed by indices. The benefit of this is that
+ * we avoid copying input data from an array into another. See \e Ransac
+ * class for more details.
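To make the worked example in the \e Ransac documentation above concrete, here is a compilable sketch of the line-fitting subclass. The Ransac<MODEL, PARAMETER> shape and the protected doEstimate/doSupports hooks are taken from the header as ported above (including the template parameter list restored there); the two-point estimate, the include path, and the distance threshold are illustrative assumptions, not ALVAR code.

#include <cmath>
#include "ar_track_alvar/Ransac.h"  // assumed install path

struct Point2 { double x, y; };
struct LineModel { Point2 p1, p2; };

class LineRansac : public alvar::Ransac<LineModel, Point2>
{
public:
  LineRansac() : Ransac(2, 16) {}

protected:
  void doEstimate(Point2** points, int points_c, LineModel* line)
  {
    if (points_c < 2)
      return;
    // Minimal estimate: the line through the first and last sample.
    // A real implementation would least-squares fit all points_c points.
    line->p1 = *points[0];
    line->p2 = *points[points_c - 1];
  }

  bool doSupports(Point2* point, LineModel* line)
  {
    // Perpendicular distance from the point to the line through p1 and p2.
    const double dx = line->p2.x - line->p1.x;
    const double dy = line->p2.y - line->p1.y;
    const double len = std::sqrt(dx * dx + dy * dy);
    if (len == 0.0)
      return false;
    const double dist = std::fabs(dy * (point->x - line->p1.x) -
                                  dx * (point->y - line->p1.y)) / len;
    return dist < 1.0;  // inlier threshold, arbitrary for this sketch
  }
};

Usage then mirrors the doc comment: estimate(input, n, n, max_rounds, &line), optionally followed by refine() over the inliers.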
+ *
+ * Extending class must provide two methods:
+ * \code
+ * void doEstimate(int* params, int param_c, MODEL* model);
+ * bool doSupports(int param, MODEL* model);
+ * \endcode
+ *
+ * Example fitting points to a line (compare this with the example
+ * in the \e Ransac class):
+ *
+ * \code
+ * typedef struct Point { double x, y; };
+ * typedef struct Line { Point p1, p2; };
+ *
+ * class MyLineFittingRansac : public IndexRansac<Line> {
+ *   Point *points;
+ *
+ *   // Line fitting needs at least 2 parameters and here we want
+ *   // to support at most 16 parameters.
+ *   MyLineFittingRansac(Point *input) : IndexRansac(2, 16), points(input) {}
+ *
+ *   void doEstimate(int *indices, int points_c, Line *line) {
+ *     if (points_c == 2) { *line = Line(points[indices[0]], points[indices[1]]); }
+ *     else {
+ *       // compute best line fitting up to 16 points.
+ *     }
+ *   }
+ *
+ *   bool doSupports(int index, Line *line) {
+ *     Point *point = &points[index];
+ *     double distance = ...; // compute point distance to line.
+ *     return distance < POINT_DISTANCE_LIMIT;
+ *   }
+ * };
+ *
+ * Point input[N_POINTS] = { .. };
+ * Line line;
+ * MyLineFittingRansac ransac(input);
+ *
+ * // assuming 60% of inliers, run RANSAC until the best model is found
+ * // with 99% probability.
+ * int max_rounds = ransac.estimateRequiredRounds(0.99, 0.6);
+ * int number_of_inliers = ransac.estimate(N_POINTS, N_POINTS, max_rounds,
+ *                                         &line);
+ *
+ * // let's make the estimate even better.
+ * if (number_of_inliers > 0 && number_of_inliers < N_POINTS)
+ *   number_of_inliers = ransac.refine(N_POINTS, N_POINTS, 10, &line);
+ *
+ * // you should keep track of the real percentage of inliers to determine
+ * // the required number of RANSAC rounds.
+ * double inlier_percentage = (double)number_of_inliers / (double)N_POINTS;
+ *
+ * \endcode
+ */
+template <class MODEL>
+class IndexRansac : public RansacImpl
+{
+protected:
+  /** \brief Creates a model estimate from a set of parameters.
    *
-   * // you should keep track of the real percentage of inliers to determine
-   * // the required number of RANSAC rounds.
-   * double inlier_percentage = (double)number_of_inliers / (double)N_POINTS;
+   * The user must implement this method to compute model parameters
+   * from the input data.
    *
-   * \endcode
+   * \param params An array of indices of sampled parameters.
+   * \param param_c The number of parameter indices in the params array.
+   * \param model Pointer to the model where to store the estimate.
    */
-  template <class MODEL, class PARAMETER>
-  class Ransac : public RansacImpl {
-
-  protected:
-    /** \brief Creates a model estimate from a set of parameters.
-     *
-     * The user must implement this method to compute model parameters
-     * from the input data.
-     *
-     * \param params An array of pointers to sampled parameters (input data).
-     * \param param_c The number of parameter pointers in the params array.
-     * \param model Pointer to the model where to store the estimate.
- */ - virtual bool doSupports(PARAMETER* param, MODEL* model) = 0; - - /** - * Wrapper for templated parameters. - */ - void _doEstimate(void** params, int param_c, void* model) { - doEstimate((PARAMETER**)params, param_c, (MODEL*)model); - } - - /** - * Wrapper for templated parameters. - */ - bool _doSupports(void* param, void* model) { - return doSupports((PARAMETER*)param, (MODEL*)model); - } - - public: - /** \brief Initialize the algorithm. - * - * Uses at least min_params and at most max_params number of input data - * elements for model estimation. - * - * Must be: max_params >= min_params - * - * \param min_params is the minimum number of parameters needed to create - * a model. - * \param max_params is the maximum number of parameters to using in refining - * the model. - */ - Ransac(int min_params, int max_params) - : RansacImpl(min_params, max_params, sizeof(PARAMETER), sizeof(MODEL)) {} - - virtual ~Ransac() {} - - /** \brief Estimates a model from input data parameters. - * - * Randomly samples min_params number of input data elements from params array - * and chooses the model that has the largest set of supporting parameters - * (inliers) in the params array. - * - * Note that this method always uses min_params number of parameters, that is, - * \e doEstimate method can be implemented to support only the minimum number - * of parameters unless \e refine method is used. - * - * \param params Parameters that the model is estimated from (input data). - * \param param_c Number of elements in the params array. - * \param support_limit The search is stopped if a model receives - * more support that this limit. - * \param max_rounds How many different samples are tried before - * stopping the search. - * \param model The estimated model is stored here. - * \return the number of parameters supporting the model, or 0 - * if a suitable model could not be build at all. - */ - int estimate(PARAMETER* params, int param_c, - int support_limit, int max_rounds, - MODEL* model) { - return _estimate(params, param_c, support_limit, max_rounds, model); - } - - /** \brief Iteratively makes the estimated model better. - * - * Starting with the estimated model, computes the model from all - * inlier parameters and interates until no new parameters support - * the model. - * - * Note that this method uses up to max_params number of parameters, - * that is, \e doEstimate method must be implemented in such a way - * that it can estimate a model from a variable number of parameters. - * - * \param params Parameters that the model is estimated from. - * \param param_c Number of parameters. - * \param support_limit The search is stopped is a model receives - * more support that this limit. - * \param max_rounds How many iterations of the refinement are run. - * \param model The estimated model that is refined. - * \param inlier_mask Byte array where 1 is stored for inliers and 0 for outliers. - * \return the number of parameters supporting the model. - */ - int refine(PARAMETER* params, int param_c, - int support_limit, int max_rounds, - MODEL* model, char *inlier_mask = NULL) { - return _refine(params, param_c, support_limit, max_rounds, model, inlier_mask); - } + virtual void doEstimate(int* params, int param_c, MODEL* model) = 0; - }; + /** \brief Computes how well a parameters supports a model. + * + * This method is used by the RANSAC algorithm to count how many + * parameters support the estimated model (inliers). 
Although this
+   * is case specific, usually parameter supports the model when the distance
+   * from model prediction is not too far away from the parameter.
+   *
+   * \param param Index of the parameter to check.
+   * \param model Pointer to the model to check the parameter against.
+   * \return True when the parameter supports the model.
+   */
+  virtual bool doSupports(int param, MODEL* model) = 0;
 
+  /**
+   * Wrapper for templated parameters.
+   */
+  void _doEstimate(int* params, int param_c, void* model)
+  {
+    doEstimate(params, param_c, (MODEL*)model);
+  }
 
   /**
-   * \brief Implementation of a general RANdom SAmple Consensus algorithm
-   * with implicit parameters.
-   *
-   * These parameters are accessed by indises. The benefit of this is that
-   * we avoid copying input data from an array into another. See \e Ransac
-   * class for more details.
-   *
-   * Extending class must provide two methods:
-   * \code
-   * void doEstimate(int* params, int param_c, MODEL* model);
-   * bool doSupports(int param, MODEL* model);
-   * \endcode
-   *
-   * Example fitting points to a line (compare this with the example
-   * in the \e Ransac class):
-   *
-   * \code
-   * typedef struct Point { double x, double y; };
-   * typedef struct Line { Point p1, p2; };
+   * Wrapper for templated parameters.
+   */
+  bool _doSupports(int param, void* model)
+  {
+    return doSupports(param, (MODEL*)model);
+  }
+
+public:
+  /** \brief Initialize the algorithm.
    *
-   * class MyLineFittingRansac : public IndexRansac {
-   *   Point *points;
+   * Uses at least min_params and at most max_params number of input data
+   * elements for model estimation.
    *
-   *   // Line fitting needs at least 2 parameters and here we want
-   *   // to support at most 16 parameters.
-   *   MyLineFittingRansac(Points *input) : Ransac(2, 16), points(input) {}
+   * Must be: max_params >= min_params
    *
-   *   void doEstimate(int *indises, int points_c, Line *line) {
-   *     if (points_c == 2) { return Line(points[indises[0]], *points[indises[1]]); }
-   *     else { // compute best line fitting up to 16 points. }
-   *   }
+   * \param min_params is the minimum number of parameters needed to create
+   * a model.
+   * \param max_params is the maximum number of parameters to use in refining
+   * the model.
+   */
+  IndexRansac(int min_params, int max_params)
+    : RansacImpl(min_params, max_params, sizeof(MODEL))
+  {
+  }
+
+  virtual ~IndexRansac()
+  {
+  }
+
+  /** \brief Estimates a model from input data parameters.
    *
-   *   bool doSupports(int index, Line *line) {
-   *     Point *point = &points[index];
-   *     double distance = // compute point distance to line.
-   *     return distance < POINT_DISTANCE_LIMIT;
-   *   }
-   * };
+   * Randomly samples min_params number of input data elements from params array
+   * and chooses the model that has the largest set of supporting parameters
+   * (inliers) in the params array.
    *
-   * Point input[N_POINTS] = { .. };
-   * Line line;
-   * MyLineFittingRansac ransac(input);
+   * Note that this method always uses min_params number of parameters, that is,
+   * \e doEstimate method can be implemented to support only the minimum number
+   * of parameters unless \e refine method is used.
    *
-   * // assuming 60% of inliers, run RANSAC until the best model is found with 99% propability.
-   * int max_rounds = ransac.estimateRequiredRounds(0.99, 0.6);
-   * int number_of_inliers = ransac.estimate(input, N_POINTS, N_POINTS, max_rounds, &line);
+   * \param param_c Number of parameters available in estimation.
+   * \param support_limit The search is stopped if a model receives
+   * more support than this limit.
+   * \param max_rounds How many different samples are tried before
+   * stopping the search.
+   * \param model The estimated model is stored here.
+   * \return the number of parameters supporting the model, or 0
+   * if a suitable model could not be built at all.
+   */
+  int estimate(int param_c, int support_limit, int max_rounds, MODEL* model)
+  {
+    return _estimate(param_c, support_limit, max_rounds, model);
+  }
+
+  /** \brief Iteratively makes the estimated model better.
    *
-   * // lets make the estimate even better.
-   * if (number_of_inliers > 0 && number_of_inliers < N_POINTS)
-   *   number_of_inliers = ransac.refine(input, N_POINTS, N_POINTS, 10, &line);
+   * Starting with the estimated model, computes the model from all
+   * inlier parameters and iterates until no new parameters support
+   * the model.
    *
-   * // you should keep track of the real percentage of inliers to determine
-   * // the required number of RANSAC rounds.
-   * double inlier_percentage = (double)number_of_inliers / (double)N_POINTS;
+   * Note that this method uses up to max_params number of parameters,
+   * that is, \e doEstimate method must be implemented in such a way
+   * that it can estimate a model from a variable number of parameters.
    *
-   * \endcode
+   * \param param_c Number of parameters available for estimation.
+   * \param support_limit The search is stopped if a model receives
+   * more support than this limit.
+   * \param max_rounds How many iterations of the refinement are run.
+   * \param model The estimated model that is refined.
+   * \param inlier_mask Byte array where 1 is stored for inliers and 0 for
+   * outliers.
+   * \return the number of parameters supporting the model.
    */
-  template <class MODEL>
-  class IndexRansac : public RansacImpl {
-
-  protected:
-    /** \brief Creates a model estimate from a set of parameters.
-     *
-     * The user must implement this method to compute model parameters
-     * from the input data.
-     *
-     * \param params An array of indises of sampled parameters.
-     * \param param_c The number of parameter indises in the params array.
-     * \param model Pointer to the model where to store the estimate.
-     */
-    virtual void doEstimate(int* params, int param_c, MODEL* model) = 0;
-
-    /** \brief Computes how well a parameters supports a model.
-     *
-     * This method is used by the RANSAC algorithm to count how many
-     * parameters support the estimated model (inliers). Althought this
-     * is case specific, usually parameter supports the model when the distance
-     * from model prediction is not too far away from the parameter.
-     *
-     * \param param Index of the parameter to check.
-     * \param model Pointer to the model to check the parameter against.
-     * \return True when the parameter supports the model.
-     */
-    virtual bool doSupports(int param, MODEL* model) = 0;
-
-    /**
-     * Wrapper for templated parameters.
-     */
-    void _doEstimate(int* params, int param_c, void* model) {
-      doEstimate(params, param_c, (MODEL*)model);
-    }
-
-    /**
-     * Wrapper for templated parameters.
-     */
-    bool _doSupports(int param, void* model) {
-      return doSupports(param, (MODEL*)model);
-    }
-
-    public:
-    /** \brief Initialize the algorithm.
-     *
-     * Uses at least min_params and at most max_params number of input data
-     * elements for model estimation.
-     *
-     * Must be: max_params >= min_params
-     *
-     * \param min_params is the minimum number of parameters needed to create
-     * a model.
-     * \param max_params is the maximum number of parameters to using in refining
-     * the model.
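Both estimate() overloads leave max_rounds to the caller, which is where estimateRequiredRounds() from RansacImpl comes in. The arithmetic is the standard RANSAC bound: with inlier ratio w and hypotheses drawn from n = min_params samples, one draw is all-inlier with probability w^n, so reaching success probability p needs about log(1 - p) / log(1 - w^n) rounds. A standalone sketch of that formula (illustrative only; requiredRounds is a hypothetical helper, not the ALVAR API):

#include <cmath>

// p = desired success probability, w = expected inlier ratio,
// n = samples drawn per hypothesis (min_params).
int requiredRounds(double p, double w, int n)
{
  const double all_inlier = std::pow(w, n);  // probability one draw is all-inlier
  return (int)std::ceil(std::log(1.0 - p) / std::log(1.0 - all_inlier));
}

For the line-fitting example above, requiredRounds(0.99, 0.6, 2) is about 11 rounds.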
- */ - IndexRansac(int min_params, int max_params) - : RansacImpl(min_params, max_params, sizeof(MODEL)) {} - - virtual ~IndexRansac() {} - - /** \brief Estimates a model from input data parameters. - * - * Randomly samples min_params number of input data elements from params array - * and chooses the model that has the largest set of supporting parameters - * (inliers) in the params array. - * - * Note that this method always uses min_params number of parameters, that is, - * \e doEstimate method can be implemented to support only the minimum number - * of parameters unless \e refine method is used. - * - * \param param_c Number of parameters available in estimation. - * \param support_limit The search is stopped if a model receives - * more support that this limit. - * \param max_rounds How many different samples are tried before - * stopping the search. - * \param model The estimated model is stored here. - * \return the number of parameters supporting the model, or 0 - * if a suitable model could not be build at all. - */ - int estimate(int param_c, - int support_limit, int max_rounds, - MODEL* model) { - return _estimate(param_c, support_limit, max_rounds, model); - } - - /** \brief Iteratively makes the estimated model better. - * - * Starting with the estimated model, computes the model from all - * inlier parameters and interates until no new parameters support - * the model. - * - * Note that this method uses up to max_params number of parameters, - * that is, \e doEstimate method must be implemented in such a way - * that it can estimate a model from a variable number of parameters. - * - * \param param_c Number of parameters available for estimation. - * \param support_limit The search is stopped if a model receives - * more support that this limit. - * \param max_rounds How many iterations of the refinement are run. - * \param model The estimated model that is refined. - * \param inlier_mask Byte array where 1 is stored for inliers and 0 for outliers. - * \return the number of parameters supporting the model. - */ - int refine(int param_c, - int support_limit, int max_rounds, - MODEL* model, char *inlier_mask = NULL) { - return _refine(param_c, support_limit, max_rounds, model, inlier_mask); - } - - }; + int refine(int param_c, int support_limit, int max_rounds, MODEL* model, + char* inlier_mask = NULL) + { + return _refine(param_c, support_limit, max_rounds, model, inlier_mask); + } +}; -} // namespace alvar +} // namespace alvar -#endif //__Ransac_h__ +#endif //__Ransac_h__ diff --git a/ar_track_alvar/include/ar_track_alvar/Rotation.h b/ar_track_alvar/include/ar_track_alvar/Rotation.h index 1798571..ab5c509 100644 --- a/ar_track_alvar/include/ar_track_alvar/Rotation.h +++ b/ar_track_alvar/include/ar_track_alvar/Rotation.h @@ -34,190 +34,199 @@ #include "Alvar.h" #include "Util.h" -#include "opencv2/core/types_c.h" - -namespace alvar { +namespace alvar +{ /** - * \brief \e Rotation structure and transformations between different parameterizations. + * \brief \e Rotation structure and transformations between different + * parameterizations. */ class ALVAR_EXPORT Rotation { - -public: - CvMat quaternion_mat; - double quaternion[4]; - - /** - * \brief Rotation can be represented in four ways: quaternion (QUAT), matrix (MAT), euler angles (EUL) and exponential map (ROD). - */ - enum RotationType {QUAT, MAT, EUL, ROD}; - - Rotation(); - Rotation(const Rotation& r); - - /** - * \brief Constructor. - * \param data Rotation data stored in CvMat. 
With RotationType::MAT both 3x3 and 4x4 matrices are allowed. - * \param t Rotation type that corresponds to data. - */ - Rotation(CvMat *data, RotationType t); - - Rotation& operator = (const Rotation& p); - Rotation& operator += (const Rotation& v); - //Rotation& operator + () {return *this;} - - void Transpose(); - - /** - * \brief Simple function to mirror a rotation matrix in different directions. - * \param mat Matrix to be mirrored. - * \param x - * \param y - * \param z - */ - static void MirrorMat(CvMat *mat, bool x, bool y, bool z); - - /** - * \brief Mirrors the rotation in selected directions. - * \param x - * \param y - * \param z - */ - void Mirror(bool x, bool y, bool z); - - /** - * \brief Resets the rotation into identity. - */ - void Reset(); - - /** - * \brief Converts 3x3 rotation matrix into Rodriques representation. - * \param mat 3x3 rotation matrix. - * \param rod Resulting 3x1 rotation vector. - */ - static void Mat9ToRod(double *mat, double *rod); - - /** - * \brief Converts 3x1 rotation vector into 3x3 rotation matrix using Rodriques' formula. - * \param rod 3x1 rotation vector. - * \param Resulting 3x3 rotation matrix. - */ - static void RodToMat9(double *rod, double *mat); - - /** - * \brief Inverts unit quaternion. - * \param q Unit quaternion to be inverted. - * \param qi Resulting quaternion. - */ - static void QuatInv(const double *q, double *qi); - - /** - * \brief Normalizes a quaternion. - * \param q Quaternion to be normalized. - */ - static void QuatNorm(double *q); - - /** - * \brief Quaternion multiplication. - * \param q1 - * \param q2 - * \param q3 Resulting quaternion. - */ - static void QuatMul(const double *q1, const double *q2, double *q3); - - //% The quaternion has to be normalized!!! - /** - * \brief Converts a rotation described by a quaternion into 3x3 rotation matrix. - * \param quat Rotation in quaternion form. - * \param mat Corresponding 3x3 rotation matrix. - */ - static void QuatToMat9(const double *quat, double *mat); - - // TODO: Now we don't want to eliminate the translation part from the matrix here. Did this change break something??? - /** - * \brief Converts a rotation described by a quaternion into 4x4 OpenGL-like transformation matrix. The translation part is not altered. - * \param quat Rotation in quaternion form. - * \param mat Resulting 4x4 transformation matrix. - */ - static void QuatToMat16(const double *quat, double *mat); - - /** - * \brief Converts a rotation described by a quaternion into Euler angles. - * \param q Rotation in quaternion form. - * \param eul Resulting Euler angles. - */ - static void QuatToEul(const double *q, double *eul); - - /** - * \brief Converts a 3x3 rotation martix into quaternion form. - * \param mat 3x3 rotation matrix. - * \param quat Resulting quaternion. - */ - static void Mat9ToQuat(const double *mat, double *quat); - - /** - * \brief Converts a rotation described by Euler angles into quaternion form. - * \param eul Rotation in Euler angles. - * \param quat Resulting quaternion. - */ - static void EulToQuat(const double *eul, double *quat); - - /** - * \brief Sets the rotation from given quaternion. - * \param mat Input quaternion (4x1 CvMat). - */ - void SetQuaternion(CvMat *mat); - - /** - * \brief Sets the rotation from given quaternion. - * \param mat Input quaternion (4x1 double array). - */ - void SetQuaternion(const double *quat); - - /** - * \brief Sets the rotation from given Euler angles. - * \param mat Input Euler angles (3x1 CvMat). 
- */ - void SetEuler(const CvMat *mat); - - /** - * \brief Sets the rotation from given rotation vector. - * \param mat Input rotation vector (3x1 CvMat). - */ - void SetRodriques(const CvMat *mat); - - /** - * \brief Sets the rotation from given rotation matrix. 3x3 and 4x4 matrices are allowed. - * \param mat Input rotation matrix (3x3 or 4x4 CvMat). - */ - void SetMatrix(const CvMat *mat); - - /** - * \brief Returns the rotation in matrix form. 3x3 and 4x4 matrices are allowed. - * \param mat The rotation is stored here. - */ - void GetMatrix(CvMat *mat) const; - - /** - * \brief Returns the rotation in rotation vector form. - * \param mat The rotation is stored here (3x1 CvMat). - */ - void GetRodriques(CvMat *mat) const; - - /** - * \brief Returns the rotation in Euler angles. - * \param mat The rotation is stored here (3x1 CvMat). - */ - void GetEuler(CvMat *mat) const; - - /** - * \brief Returns the rotation in quaternion form. - * \param mat The rotation is stored here (4x1 CvMat). - */ - void GetQuaternion(CvMat *mat) const; +public: + cv::Mat quaternion_mat; + double quaternion[4]; + + /** + * \brief Rotation can be represented in four ways: quaternion (QUAT), matrix + * (MAT), euler angles (EUL) and exponential map (ROD). + */ + enum RotationType + { + QUAT, + MAT, + EUL, + ROD + }; + + Rotation(); + Rotation(const Rotation& r); + + /** + * \brief Constructor. + * \param data Rotation data stored in cv::Mat. With RotationType::MAT both + * 3x3 and 4x4 matrices are allowed. \param t Rotation type that corresponds + * to data. + */ + Rotation(const cv::Mat& data, RotationType t); + + Rotation& operator=(const Rotation& p); + Rotation& operator+=(const Rotation& v); + // Rotation& operator + () {return *this;} + + void Transpose(); + + /** + * \brief Simple function to mirror a rotation matrix in different directions. + * \param mat Matrix to be mirrored. + * \param x + * \param y + * \param z + */ + static void MirrorMat(cv::Mat& mat, bool x, bool y, bool z); + + /** + * \brief Mirrors the rotation in selected directions. + * \param x + * \param y + * \param z + */ + void Mirror(bool x, bool y, bool z); + + /** + * \brief Resets the rotation into identity. + */ + void Reset(); + + /** + * \brief Converts 3x3 rotation matrix into Rodriques representation. + * \param mat 3x3 rotation matrix. + * \param rod Resulting 3x1 rotation vector. + */ + static void Mat9ToRod(double* mat, double* rod); + + /** + * \brief Converts 3x1 rotation vector into 3x3 rotation matrix using + * Rodriques' formula. \param rod 3x1 rotation vector. \param Resulting 3x3 + * rotation matrix. + */ + static void RodToMat9(const double* rod, double* mat); + + /** + * \brief Inverts unit quaternion. + * \param q Unit quaternion to be inverted. + * \param qi Resulting quaternion. + */ + static void QuatInv(const double* q, double* qi); + + /** + * \brief Normalizes a quaternion. + * \param q Quaternion to be normalized. + */ + static void QuatNorm(double* q); + + /** + * \brief Quaternion multiplication. + * \param q1 + * \param q2 + * \param q3 Resulting quaternion. + */ + static void QuatMul(const double* q1, const double* q2, double* q3); + + //% The quaternion has to be normalized!!! + /** + * \brief Converts a rotation described by a quaternion into 3x3 rotation + * matrix. \param quat Rotation in quaternion form. \param mat Corresponding + * 3x3 rotation matrix. 
+ */ + static void QuatToMat9(const double* quat, double* mat); + + // TODO: Now we don't want to eliminate the translation part from the matrix + // here. Did this change break something??? + /** + * \brief Converts a rotation described by a quaternion into 4x4 OpenGL-like + * transformation matrix. The translation part is not altered. \param quat + * Rotation in quaternion form. \param mat Resulting 4x4 transformation + * matrix. + */ + static void QuatToMat16(const double* quat, double* mat); + + /** + * \brief Converts a rotation described by a quaternion into Euler angles. + * \param q Rotation in quaternion form. + * \param eul Resulting Euler angles. + */ + static void QuatToEul(const double* q, double* eul); + + /** + * \brief Converts a 3x3 rotation martix into quaternion form. + * \param mat 3x3 rotation matrix. + * \param quat Resulting quaternion. + */ + static void Mat9ToQuat(const double* mat, double* quat); + + /** + * \brief Converts a rotation described by Euler angles into quaternion form. + * \param eul Rotation in Euler angles. + * \param quat Resulting quaternion. + */ + static void EulToQuat(const double* eul, double* quat); + + /** + * \brief Sets the rotation from given quaternion. + * \param mat Input quaternion (4x1 cv::Mat). + */ + void SetQuaternion(const cv::Mat& mat); + + /** + * \brief Sets the rotation from given quaternion. + * \param mat Input quaternion (4x1 double array). + */ + void SetQuaternion(const double* quat); + + /** + * \brief Sets the rotation from given Euler angles. + * \param mat Input Euler angles (3x1 cv::Mat). + */ + void SetEuler(const cv::Mat& mat); + + /** + * \brief Sets the rotation from given rotation vector. + * \param mat Input rotation vector (3x1 cv::Mat). + */ + void SetRodriques(const cv::Mat& mat); + + /** + * \brief Sets the rotation from given rotation matrix. 3x3 and 4x4 matrices + * are allowed. \param mat Input rotation matrix (3x3 or 4x4 cv::Mat). + */ + void SetMatrix(const cv::Mat& mat); + + /** + * \brief Returns the rotation in matrix form. 3x3 and 4x4 matrices are + * allowed. \param mat The rotation is stored here. + */ + void GetMatrix(cv::Mat& mat) const; + + /** + * \brief Returns the rotation in rotation vector form. + * \param mat The rotation is stored here (3x1 cv::Mat). + */ + void GetRodriques(cv::Mat& mat) const; + + /** + * \brief Returns the rotation in Euler angles. + * \param mat The rotation is stored here (3x1 cv::Mat). + */ + void GetEuler(cv::Mat& mat) const; + + /** + * \brief Returns the rotation in quaternion form. + * \param mat The rotation is stored here (4x1 cv::Mat). + */ + void GetQuaternion(cv::Mat& mat) const; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/SfM.h b/ar_track_alvar/include/ar_track_alvar/SfM.h index f36fa62..47808ec 100644 --- a/ar_track_alvar/include/ar_track_alvar/SfM.h +++ b/ar_track_alvar/include/ar_track_alvar/SfM.h @@ -32,92 +32,121 @@ #include "EC.h" -namespace alvar { - -/** \brief Simple structure from motion implementation using \e CameraEC , \e MarkerDetectorEC and \e TrackerFeaturesEC +namespace alvar +{ +/** \brief Simple structure from motion implementation using \e CameraEC , \e + * MarkerDetectorEC and \e TrackerFeaturesEC * * See \e SamplePointcloud for usage example. 
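Before the SimpleSfM declaration below, a quick sanity check of the static conversion helpers retained by this port (signatures as declared in Rotation.h above; the include path and the quaternion component order are assumptions here, though the round trip itself does not depend on the order):

#include <cstdio>
#include "ar_track_alvar/Rotation.h"  // assumed install path

int main()
{
  double q[4] = { 1.0, 0.0, 1.0, 0.0 };  // deliberately un-normalized
  alvar::Rotation::QuatNorm(q);          // scale to a unit quaternion

  double mat9[9];
  alvar::Rotation::QuatToMat9(q, mat9);  // quaternion -> 3x3 rotation matrix

  double q_back[4];
  alvar::Rotation::Mat9ToQuat(mat9, q_back);  // and back: q_back == +/- q

  std::printf("%.3f %.3f %.3f %.3f\n", q_back[0], q_back[1], q_back[2], q_back[3]);
  return 0;
}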
*/ -class ALVAR_EXPORT SimpleSfM { +class ALVAR_EXPORT SimpleSfM +{ public: - /** \brief Extended version of \e ExternalContainer structure used internally in \e SimpleSfM */ - class Feature : public ExternalContainer { - public: - bool has_stored_pose; - Pose pose1; - CvPoint2D32f p2d1; - CvPoint3D32f p3d_sh; - CvPoint2D32f projected_p2d_sh; - int tri_cntr; // This is needed only by UpdateTriangulateOnly - int estimation_type; + /** \brief Extended version of \e ExternalContainer structure used internally + * in \e SimpleSfM */ + class Feature : public ExternalContainer + { + public: + bool has_stored_pose; + Pose pose1; + CvPoint2D32f p2d1; + CvPoint3D32f p3d_sh; + CvPoint2D32f projected_p2d_sh; + int tri_cntr; // This is needed only by UpdateTriangulateOnly + int estimation_type; - Feature() : ExternalContainer() { - has_stored_pose = false; - p3d_sh.x = 0.f; p3d_sh.y = 0.f; p3d_sh.z = 0.f; - tri_cntr = 0; - estimation_type = 0; - } - Feature(const Feature &c) : ExternalContainer(c) { - has_stored_pose = c.has_stored_pose; - pose1 = c.pose1; - p2d1 = c.p2d1; - p3d_sh = c.p3d_sh; - projected_p2d_sh = c.projected_p2d_sh; - tri_cntr = c.tri_cntr; - estimation_type = c.estimation_type; - } - }; - /** \brief The map of all tracked features */ - std::map container; - std::map container_triangulated; + Feature() : ExternalContainer() + { + has_stored_pose = false; + p3d_sh.x = 0.f; + p3d_sh.y = 0.f; + p3d_sh.z = 0.f; + tri_cntr = 0; + estimation_type = 0; + } + Feature(const Feature& c) : ExternalContainer(c) + { + has_stored_pose = c.has_stored_pose; + pose1 = c.pose1; + p2d1 = c.p2d1; + p3d_sh = c.p3d_sh; + projected_p2d_sh = c.projected_p2d_sh; + tri_cntr = c.tri_cntr; + estimation_type = c.estimation_type; + } + }; + /** \brief The map of all tracked features */ + std::map container; + std::map container_triangulated; protected: - std::map container_reset_point; - std::map container_triangulated_reset_point; + std::map container_reset_point; + std::map container_triangulated_reset_point; - CameraEC cam; - MarkerDetectorEC marker_detector; - TrackerFeaturesEC tf; - Pose pose; - bool pose_ok; + CameraEC cam; + MarkerDetectorEC marker_detector; + TrackerFeaturesEC tf; + Pose pose; + bool pose_ok; - bool update_tri; - std::string multi_marker_file; - bool markers_found; - double scale; - Pose pose_original; - Pose pose_difference; + bool update_tri; + std::string multi_marker_file; + bool markers_found; + double scale; + Pose pose_original; + Pose pose_difference; public: - /** \brief Constructor */ - SimpleSfM() : tf(200, 200, 0.01, 20, 4, 6), pose_ok(false), update_tri(false), markers_found(false) {} - /** \brief Reset the situation back to the point it was when \e SetResetPoint was called */ - void Reset(bool reset_also_triangulated = true); - /** \brief Remember the current state and return here when the \e Reset is called */ - void SetResetPoint(); - /** \brief Clear all tracked features */ - void Clear(); - /** \brief Set the suitable scale to be used. This affects quite much how the tracking behaves (when features are triangulated etc.) */ - void SetScale(double s) { scale = s; } - /** \brief Get the camera used internally. You need to use this to set correct camera calibration (see \e SamplePointcloud) */ - CameraEC *GetCamera(); - /** \brief Get the estimated pose */ - Pose *GetPose(); - /** \brief Add \e MultiMarker from file as a basis for tracking. It is good idea to call \e SetResetPoint after these. 
 */
-  bool AddMultiMarker(const char *fname, FILE_FORMAT format = FILE_FORMAT_XML);
-  /** \brief Add \e MultiMarker to be a basis for tracking. It is good idea to call \e SetResetPoint after these. */
-  bool AddMultiMarker(MultiMarkerEC *mm);
-  /** \brief Add an marker to be a basis for tracking. It is good idea to call \e SetResetPoint after these. */
-  void AddMarker(int marker_id, double edge_length, Pose &pose);
-  /** \brief Update position assuming that camera is moving with 6-DOF */
-  bool Update(IplImage *image, bool assume_plane=true, bool triangulate=true, float reproj_err_limit=5.f, float triangulate_angle=15.f);
-  /** \brief Update camera 6-DOF position using triangulated points only (This is the old version of Update) */
-  bool UpdateTriangulateOnly(IplImage *image);
-  /** \brief Update position assuming that user is more standing still and viewing the environment. */
-  bool UpdateRotationsOnly(IplImage *image);
-  /** \brief Draw debug information about the tracked features and detected markers. */
-  void Draw(IplImage *rgba);
+  /** \brief Constructor */
+  SimpleSfM()
+    : tf(200, 200, 0.01, 20, 4, 6)
+    , pose_ok(false)
+    , update_tri(false)
+    , markers_found(false)
+  {
+  }
+  /** \brief Reset the situation back to the point it was when \e SetResetPoint
+   * was called */
+  void Reset(bool reset_also_triangulated = true);
+  /** \brief Remember the current state and return here when the \e Reset is
+   * called */
+  void SetResetPoint();
+  /** \brief Clear all tracked features */
+  void Clear();
+  /** \brief Set the suitable scale to be used. This affects quite much how the
+   * tracking behaves (when features are triangulated etc.) */
+  void SetScale(double s)
+  {
+    scale = s;
+  }
+  /** \brief Get the camera used internally. You need to use this to set correct
+   * camera calibration (see \e SamplePointcloud) */
+  CameraEC* GetCamera();
+  /** \brief Get the estimated pose */
+  Pose* GetPose();
+  /** \brief Add \e MultiMarker from file as a basis for tracking. It is a good
+   * idea to call \e SetResetPoint after these. */
+  bool AddMultiMarker(const char* fname, FILE_FORMAT format = FILE_FORMAT_XML);
+  /** \brief Add \e MultiMarker to be a basis for tracking. It is a good idea to
+   * call \e SetResetPoint after these. */
+  bool AddMultiMarker(MultiMarkerEC* mm);
+  /** \brief Add a marker to be a basis for tracking. It is a good idea to call
+   * \e SetResetPoint after these. */
+  void AddMarker(int marker_id, double edge_length, Pose& pose);
+  /** \brief Update position assuming that camera is moving with 6-DOF */
+  bool Update(IplImage* image, bool assume_plane = true,
+              bool triangulate = true, float reproj_err_limit = 5.f,
+              float triangulate_angle = 15.f);
+  /** \brief Update camera 6-DOF position using triangulated points only (This
+   * is the old version of Update) */
+  bool UpdateTriangulateOnly(IplImage* image);
+  /** \brief Update position assuming that the user is mostly standing still and
+   * viewing the environment. */
+  bool UpdateRotationsOnly(IplImage* image);
+  /** \brief Draw debug information about the tracked features and detected
+   * markers.
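The SimpleSfM surface above reduces to a small per-frame loop. A hedged sketch (the bundle file name is a placeholder, camera calibration is elided, and frames are still IplImage at this stage of the port):

#include "ar_track_alvar/SfM.h"  // assumed install path

void track_frame(alvar::SimpleSfM& sfm, IplImage* frame, bool& initialized)
{
  if (!initialized)
  {
    // Calibration of sfm.GetCamera() would go here (see \e SamplePointcloud).
    sfm.AddMultiMarker("mmarker.xml");  // placeholder bundle file, XML format
    sfm.SetResetPoint();
    initialized = true;
  }

  // Defaults: assume_plane = true, triangulate = true.
  if (sfm.Update(frame))
  {
    alvar::Pose* pose = sfm.GetPose();  // camera pose relative to the markers
    (void)pose;                         // hand off to rendering / TF here
  }
}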
*/ + void Draw(IplImage* rgba); }; -} +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Shared.h b/ar_track_alvar/include/ar_track_alvar/Shared.h old mode 100755 new mode 100644 index 34335da..0a28bb1 --- a/ar_track_alvar/include/ar_track_alvar/Shared.h +++ b/ar_track_alvar/include/ar_track_alvar/Shared.h @@ -5,47 +5,56 @@ using namespace alvar; -void outputEnumeratedPlugins(CaptureFactory::CapturePluginVector &plugins) +void outputEnumeratedPlugins(CaptureFactory::CapturePluginVector& plugins) { - for (int i = 0; i < (int)plugins.size(); ++i) { - if (i != 0) { - std::cout << ", "; - } - std::cout << plugins.at(i); + for (int i = 0; i < (int)plugins.size(); ++i) + { + if (i != 0) + { + std::cout << ", "; } + std::cout << plugins.at(i); + } - std::cout << std::endl; + std::cout << std::endl; } -void outputEnumeratedDevices(CaptureFactory::CaptureDeviceVector &devices, int selectedDevice) +void outputEnumeratedDevices(CaptureFactory::CaptureDeviceVector& devices, + int selectedDevice) { - for (int i = 0; i < (int)devices.size(); ++i) { - if (selectedDevice == i) { - std::cout << "* "; - } - else { - std::cout << " "; - } - - std::cout << i << ": " << devices.at(i).uniqueName(); + for (int i = 0; i < (int)devices.size(); ++i) + { + if (selectedDevice == i) + { + std::cout << "* "; + } + else + { + std::cout << " "; + } - if (devices[i].description().length() > 0) { - std::cout << ", " << devices.at(i).description(); - } + std::cout << i << ": " << devices.at(i).uniqueName(); - std::cout << std::endl; + if (devices[i].description().length() > 0) + { + std::cout << ", " << devices.at(i).description(); } + + std::cout << std::endl; + } } -int defaultDevice(CaptureFactory::CaptureDeviceVector &devices) +int defaultDevice(CaptureFactory::CaptureDeviceVector& devices) { - for (int i = 0; i < (int)devices.size(); ++i) { - if (devices.at(i).captureType() == "highgui") { - return i; - } + for (int i = 0; i < (int)devices.size(); ++i) + { + if (devices.at(i).captureType() == "highgui") + { + return i; } + } - return 0; + return 0; } #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Threads.h b/ar_track_alvar/include/ar_track_alvar/Threads.h index 376a24e..1d330ca 100644 --- a/ar_track_alvar/include/ar_track_alvar/Threads.h +++ b/ar_track_alvar/include/ar_track_alvar/Threads.h @@ -32,8 +32,8 @@ #include "Alvar.h" -namespace alvar { - +namespace alvar +{ class ThreadsPrivate; /** @@ -44,28 +44,28 @@ class ThreadsPrivate; class ALVAR_EXPORT Threads { public: - /** - * \brief Constructor. - */ - Threads(); + /** + * \brief Constructor. + */ + Threads(); - /** - * \brief Destructor. - */ - ~Threads(); + /** + * \brief Destructor. + */ + ~Threads(); - /** - * \brief Creates a new thread and returns true on success. - * - * \param method The method that the thread will execute. - * \param parameters The parameters sent to the method. - */ - bool create(void *(*method)(void *), void *parameters); + /** + * \brief Creates a new thread and returns true on success. + * + * \param method The method that the thread will execute. + * \param parameters The parameters sent to the method. 
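The create() method documented above is a thin portability wrapper; the worker must have the C-style void* (*)(void*) signature. A minimal sketch (include path assumed; note the header exposes no join, so real code must synchronize with the worker before the Threads object or main exits):

#include <cstdio>
#include "ar_track_alvar/Threads.h"  // assumed install path

static void* worker(void* parameters)
{
  std::printf("worker started, argument=%p\n", parameters);
  return 0;
}

int main()
{
  alvar::Threads threads;
  int value = 42;
  const bool ok = threads.create(&worker, &value);  // true on success
  // ... synchronize with the worker here before returning ...
  return ok ? 0 : 1;
}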
+ */ + bool create(void* (*method)(void*), void* parameters); private: - ThreadsPrivate *d; + ThreadsPrivate* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Threads_private.h b/ar_track_alvar/include/ar_track_alvar/Threads_private.h index 11bae01..e2c0509 100644 --- a/ar_track_alvar/include/ar_track_alvar/Threads_private.h +++ b/ar_track_alvar/include/ar_track_alvar/Threads_private.h @@ -24,20 +24,20 @@ #ifndef THREADS_PRIVATE_H #define THREADS_PRIVATE_H -namespace alvar { - +namespace alvar +{ class ThreadsPrivateData; class ThreadsPrivate { public: - ThreadsPrivate(); - ~ThreadsPrivate(); - bool create(void *(*method)(void *), void *parameters); + ThreadsPrivate(); + ~ThreadsPrivate(); + bool create(void* (*method)(void*), void* parameters); - ThreadsPrivateData *d; + ThreadsPrivateData* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Timer.h b/ar_track_alvar/include/ar_track_alvar/Timer.h index d7c145f..0398079 100644 --- a/ar_track_alvar/include/ar_track_alvar/Timer.h +++ b/ar_track_alvar/include/ar_track_alvar/Timer.h @@ -32,8 +32,8 @@ #include "Alvar.h" -namespace alvar { - +namespace alvar +{ class TimerPrivate; /** @@ -44,30 +44,30 @@ class TimerPrivate; class ALVAR_EXPORT Timer { public: - /** - * \brief Constructor. - */ - Timer(); + /** + * \brief Constructor. + */ + Timer(); - /** - * \brief Destructor. - */ - ~Timer(); + /** + * \brief Destructor. + */ + ~Timer(); - /** - * \brief Starts the timer. - */ - void start(); + /** + * \brief Starts the timer. + */ + void start(); - /** - * \brief Stops the timer and returns the elapsed time in seconds. - */ - double stop(); + /** + * \brief Stops the timer and returns the elapsed time in seconds. + */ + double stop(); private: - TimerPrivate *d; + TimerPrivate* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Timer_private.h b/ar_track_alvar/include/ar_track_alvar/Timer_private.h index 5874b62..bb87cfd 100644 --- a/ar_track_alvar/include/ar_track_alvar/Timer_private.h +++ b/ar_track_alvar/include/ar_track_alvar/Timer_private.h @@ -24,21 +24,21 @@ #ifndef TIMER_PRIVATE_H #define TIMER_PRIVATE_H -namespace alvar { - +namespace alvar +{ class TimerPrivateData; class TimerPrivate { public: - TimerPrivate(); - ~TimerPrivate(); - void start(); - double stop(); + TimerPrivate(); + ~TimerPrivate(); + void start(); + double stop(); - TimerPrivateData *d; + TimerPrivateData* d; }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/Tracker.h b/ar_track_alvar/include/ar_track_alvar/Tracker.h index 50829b1..92abfe2 100644 --- a/ar_track_alvar/include/ar_track_alvar/Tracker.h +++ b/ar_track_alvar/include/ar_track_alvar/Tracker.h @@ -33,22 +33,29 @@ #include #include "Alvar.h" -namespace alvar { - +namespace alvar +{ /** * \brief Pure virtual base class for tracking optical flow * - * The idea is to make own versions of \e Track method which updates the class member variables accordingly + * The idea is to make own versions of \e Track method which updates the class + * member variables accordingly */ -class ALVAR_EXPORT Tracker { +class ALVAR_EXPORT Tracker +{ public: - Tracker() {} - /** \brief Pure virtual function for making the next track step. 
This analyses the image and updates class member variables accordingly */ - virtual double Track(IplImage *img) = 0; - - virtual void Compensate(double *x, double *y) {} + Tracker() + { + } + /** \brief Pure virtual function for making the next track step. This analyses + * the image and updates class member variables accordingly */ + virtual double Track(IplImage* img) = 0; + + virtual void Compensate(double* x, double* y) + { + } }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/TrackerFeatures.h b/ar_track_alvar/include/ar_track_alvar/TrackerFeatures.h index 106f2c9..6cbcb09 100644 --- a/ar_track_alvar/include/ar_track_alvar/TrackerFeatures.h +++ b/ar_track_alvar/include/ar_track_alvar/TrackerFeatures.h @@ -32,85 +32,102 @@ #include "Tracker.h" -namespace alvar { - +namespace alvar +{ /** - * \brief \e TrackerFeatures tracks features using OpenCV's cvGoodFeaturesToTrack and cvCalcOpticalFlowPyrLK + * \brief \e TrackerFeatures tracks features using OpenCV's + * cvGoodFeaturesToTrack and cvCalcOpticalFlowPyrLK */ -class ALVAR_EXPORT TrackerFeatures : public Tracker { +class ALVAR_EXPORT TrackerFeatures : public Tracker +{ protected: - int x_res, y_res; - int frame_count; - double quality_level; - double min_distance; - int min_features; - int max_features; - char *status; - IplImage *img_eig; - IplImage *img_tmp; - IplImage *gray; - IplImage *prev_gray; - IplImage *pyramid; - IplImage *prev_pyramid; - IplImage *mask; - int next_id; - int win_size; - int pyr_levels; + int x_res, y_res; + int frame_count; + double quality_level; + double min_distance; + int min_features; + int max_features; + char* status; + IplImage* img_eig; + IplImage* img_tmp; + IplImage* gray; + IplImage* prev_gray; + IplImage* pyramid; + IplImage* prev_pyramid; + IplImage* mask; + int next_id; + int win_size; + int pyr_levels; + + /** \brief Reset track features on specified mask area */ + double TrackHid(IplImage* img, IplImage* mask = NULL, + bool add_features = true); - /** \brief Reset track features on specified mask area */ - double TrackHid(IplImage *img, IplImage *mask=NULL, bool add_features=true); public: - /** \brief \e Track result: previous features */ - CvPoint2D32f *prev_features; - /** \brief \e Track result: current features */ - CvPoint2D32f *features; - /** \brief \e Track result: count of previous features */ - int prev_feature_count; - /** \brief \e Track result: count of current features */ - int feature_count; - /** \brief \e Track result: ID:s for previous features */ - int *prev_ids; - /** \brief \e Track result: ID:s for current features */ - int *ids; - /** - * \brief Constructor for \e TrackerFeatures tracks features using OpenCV's cvGoodFeaturesToTrack and cvCalcOpticalFlowPyrLK - * \param _max_features The maximum amount of features to be tracked - * \param _min_features The minimum amount of features. The featureset is filled up when the number of features is lower than this. - * \param _quality_level Multiplier for the maxmin eigenvalue; specifies minimal accepted quality of image corners. - * \param _min_distance Limit, specifying minimum possible distance between returned corners; Euclidian distance is used. 
- * If 0 given we use default value aiming for uniform cover: _min_distance = 0.8*sqrt(x_res*y_res/max_features)) - * \param _pyr_levels Number of pyramid levels - */ - TrackerFeatures(int _max_features=100, int _min_features=90, double _quality_level=0.01, double _min_distance=10, int _pyr_levels=1, int _win_size=3); - /** \brief Destructor */ - ~TrackerFeatures(); - /** \brief Change settings while running */ - void ChangeSettings(int _max_features=100, int _min_features=90, double _quality_level=0.01, double _min_distance=10); - /** \brief Reset */ - void Reset(); - /** \brief Reset track features on specified mask area */ - double Reset(IplImage *img, IplImage *mask); - /** \brief Stop tracking the identified feature (with index for features array)*/ - bool DelFeature(int index); - /** \brief Stop tracking the identified feature (with feature id) */ - bool DelFeatureId(int id); - /** \brief Track features */ - double Track(IplImage *img) { return Track(img, true); } - /** \brief Track features */ - double Track(IplImage *img, bool add_features); - /** \brief Track features */ - double Track(IplImage* img, IplImage* mask); - /** \brief add features to the previously tracked frame if there are less than min_features */ - int AddFeatures(IplImage *mask=NULL); - /** \brief Create and get the pointer to new_features_mask */ - IplImage *NewFeatureMask(); - /** \brief Purge features that are considerably closer than the defined min_distance. - * - * Note, that we always try to maintain the smaller id's assuming that they are older ones - */ - int Purge(); + /** \brief \e Track result: previous features */ + CvPoint2D32f* prev_features; + /** \brief \e Track result: current features */ + CvPoint2D32f* features; + /** \brief \e Track result: count of previous features */ + int prev_feature_count; + /** \brief \e Track result: count of current features */ + int feature_count; + /** \brief \e Track result: IDs for previous features */ + int* prev_ids; + /** \brief \e Track result: IDs for current features */ + int* ids; + /** + * \brief Constructor for \e TrackerFeatures tracks features using OpenCV's + * cvGoodFeaturesToTrack and cvCalcOpticalFlowPyrLK + * \param _max_features The maximum number of features to be tracked + * \param _min_features The minimum number of features. The feature set is + * filled up when the number of features is lower than this. + * \param _quality_level Multiplier for the maxmin eigenvalue; specifies + * minimal accepted quality of image corners. + * \param _min_distance Limit, specifying minimum possible distance between + * returned corners; Euclidean distance is used. If 0 given we use default + * value aiming for uniform cover: _min_distance = 0.8*sqrt(x_res*y_res/max_features)) + * \param _pyr_levels Number of pyramid levels
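+ * + * A minimal tracking-loop sketch (illustrative only; \e grab is a hypothetical + * function that fills a grayscale IplImage): + * \code + * TrackerFeatures tf(100, 90, 0.01, 10); + * IplImage* frame = 0; + * while (grab(&frame)) { + * tf.Track(frame); + * for (int i = 0; i < tf.feature_count; ++i) { + * // tf.features[i] and tf.ids[i] describe one tracked corner. + * } + * } + * \endcode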
+ */ + TrackerFeatures(int _max_features = 100, int _min_features = 90, + double _quality_level = 0.01, double _min_distance = 10, + int _pyr_levels = 1, int _win_size = 3); + /** \brief Destructor */ + ~TrackerFeatures(); + /** \brief Change settings while running */ + void ChangeSettings(int _max_features = 100, int _min_features = 90, + double _quality_level = 0.01, double _min_distance = 10); + /** \brief Reset */ + void Reset(); + /** \brief Reset track features on specified mask area */ + double Reset(IplImage* img, IplImage* mask); + /** \brief Stop tracking the identified feature (with index for features + * array) */ + bool DelFeature(int index); + /** \brief Stop tracking the identified feature (with feature id) */ + bool DelFeatureId(int id); + /** \brief Track features */ + double Track(IplImage* img) + { + return Track(img, true); + } + /** \brief Track features */ + double Track(IplImage* img, bool add_features); + /** \brief Track features */ + double Track(IplImage* img, IplImage* mask); + /** \brief Add features to the previously tracked frame if there are fewer + * than min_features */ + int AddFeatures(IplImage* mask = NULL); + /** \brief Create and get the pointer to new_features_mask */ + IplImage* NewFeatureMask(); + /** \brief Purge features that are considerably closer than the defined + * min_distance. + * + * Note that we always try to maintain the smaller ids, assuming that they + * are older ones + */ + int Purge(); }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/TrackerOrientation.h b/ar_track_alvar/include/ar_track_alvar/TrackerOrientation.h index 31a5a8b..4a39c67 100644 --- a/ar_track_alvar/include/ar_track_alvar/TrackerOrientation.h +++ b/ar_track_alvar/include/ar_track_alvar/TrackerOrientation.h @@ -35,85 +35,96 @@ * \brief This file implements an orientation tracker. */ -namespace alvar { - +namespace alvar +{ /** * \brief Track orientation based only on features in the image plane.
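+ * + * A minimal usage sketch (illustrative only; assumes a calibrated \e Camera + * instance \e cam and a grayscale IplImage \e frame from elsewhere): + * \code + * TrackerOrientation tracker(640, 480); + * tracker.SetCamera(&cam); + * tracker.Track(frame); + * Pose pose; + * tracker.GetPose(pose); + * \endcode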
*/ -class ALVAR_EXPORT TrackerOrientation : public Tracker { - +class ALVAR_EXPORT TrackerOrientation : public Tracker +{ public: - TrackerOrientation(int width, int height, int image_scale=1, int outlier_limit=20) - : _image_scale(image_scale) - , _outlier_limit(outlier_limit) - , _xres(width) - , _yres(height) - { - _camera = 0; - _grsc = 0; - _object_model = 0; - } + TrackerOrientation(int width, int height, int image_scale = 1, + int outlier_limit = 20) + : _image_scale(image_scale) + , _outlier_limit(outlier_limit) + , _xres(width) + , _yres(height) + { + _camera = 0; + _grsc = 0; + _object_model = 0; + } private: - - struct Feature - { - enum {NOT_TRACKED=0, IS_TRACKED} status2D; - enum {NONE=0, USE_FOR_POSE, IS_OUTLIER, IS_INITIAL} status3D; - - Feature() - { - status3D = NONE; - status2D = NOT_TRACKED; - } - - Feature(double vx, double vy) - { - point.x = float(vx); - point.y = float(vy); - status3D = NONE; - status2D = NOT_TRACKED; - } - - ~Feature() {} - - CvPoint2D32f point; - CvPoint3D64f point3d; - }; - - TrackerFeatures _ft; - std::map _F_v; - - int _xres; - int _yres; - int _image_scale; - int _outlier_limit; - - Pose _pose; - IplImage *_grsc; - Camera *_camera; - CvMat *_object_model; + struct Feature + { + enum + { + NOT_TRACKED = 0, + IS_TRACKED + } status2D; + enum + { + NONE = 0, + USE_FOR_POSE, + IS_OUTLIER, + IS_INITIAL + } status3D; + + Feature() + { + status3D = NONE; + status2D = NOT_TRACKED; + } + + Feature(double vx, double vy) + { + point.x = float(vx); + point.y = float(vy); + status3D = NONE; + status2D = NOT_TRACKED; + } + + ~Feature() + { + } + + CvPoint2D32f point; + CvPoint3D64f point3d; + }; + + TrackerFeatures _ft; + std::map _F_v; + + int _xres; + int _yres; + int _image_scale; + int _outlier_limit; + + Pose _pose; + IplImage* _grsc; + Camera* _camera; + CvMat* _object_model; public: - void SetCamera(Camera *camera) { - _camera = camera; - } - void GetPose(Pose &pose); - void GetPose(double gl_mat[16]) { - _pose.GetMatrixGL(gl_mat); - } - void Reset(); - double Track(IplImage *image); + void SetCamera(Camera* camera) + { + _camera = camera; + } + void GetPose(Pose& pose); + void GetPose(double gl_mat[16]) + { + _pose.GetMatrixGL(gl_mat); + } + void Reset(); + double Track(IplImage* image); private: - static void Project(CvMat* state, CvMat* projection, void *param); - bool UpdatePose(IplImage* image=0); - bool UpdateRotationOnly(IplImage *gray, IplImage *image=0); - + static void Project(CvMat* state, CvMat* projection, void* param); + bool UpdatePose(IplImage* image = 0); + bool UpdateRotationOnly(IplImage* gray, IplImage* image = 0); }; -} // namespace alvar +} // namespace alvar #endif - - diff --git a/ar_track_alvar/include/ar_track_alvar/TrackerPsa.h b/ar_track_alvar/include/ar_track_alvar/TrackerPsa.h index 4b47574..6ecfe5e 100644 --- a/ar_track_alvar/include/ar_track_alvar/TrackerPsa.h +++ b/ar_track_alvar/include/ar_track_alvar/TrackerPsa.h @@ -32,59 +32,61 @@ * \brief This file implements a PSA tracker. */ -namespace alvar { - +namespace alvar +{ /** - * \brief \e TrackerPsa implements a very simple PSA tracker + * \brief \e TrackerPsa implements a very simple PSA tracker * - * (See Drab, Stephan A. & Artner, Nicole M. "Motion Detection as Interaction Technique for Games & Applications on Mobile Devices" PERMID 2005) + * (See Drab, Stephan A. & Artner, Nicole M. 
"Motion Detection as Interaction + * Technique for Games & Applications on Mobile Devices" PERMID 2005) */ -class ALVAR_EXPORT TrackerPsa : public Tracker { +class ALVAR_EXPORT TrackerPsa : public Tracker +{ protected: - int max_shift; - int x_res, y_res; - long *hor, *horprev; - long *ver, *verprev; - long framecount; + int max_shift; + int x_res, y_res; + long *hor, *horprev; + long *ver, *verprev; + long framecount; public: - /** \brief \e Track result x-translation in pixels */ - double xd; - /** \brief \e Track result y-translation in pixels */ - double yd; - /** \brief Constructor */ - TrackerPsa(int _max_shift = 50); - /** \brief Destructor */ - ~TrackerPsa(); - /** \brief Track using PSA */ - double Track(IplImage *img); + /** \brief \e Track result x-translation in pixels */ + double xd; + /** \brief \e Track result y-translation in pixels */ + double yd; + /** \brief Constructor */ + TrackerPsa(int _max_shift = 50); + /** \brief Destructor */ + ~TrackerPsa(); + /** \brief Track using PSA */ + double Track(IplImage* img); - virtual void Compensate(double *x, double *y); + virtual void Compensate(double* x, double* y); }; /** - * \brief \e TrackerPsaRot implements a slightly extended version of a \e TrackerPsa which can also detect sideways rotation + * \brief \e TrackerPsaRot implements a slightly extended version of a \e + * TrackerPsa which can also detect sideways rotation */ -class ALVAR_EXPORT TrackerPsaRot : public TrackerPsa { +class ALVAR_EXPORT TrackerPsaRot : public TrackerPsa +{ protected: - double *rot, *rotprev; - int *rot_count; + double *rot, *rotprev; + int* rot_count; public: - /** \brief \e Track result rotation in degrees */ - double rotd; - /** \brief Constructor */ - TrackerPsaRot(int _max_shift = 50); - /** \brief Destructor */ - ~TrackerPsaRot(); - /** \brief Track using PSA with rotation*/ - double Track(IplImage *img); + /** \brief \e Track result rotation in degrees */ + double rotd; + /** \brief Constructor */ + TrackerPsaRot(int _max_shift = 50); + /** \brief Destructor */ + ~TrackerPsaRot(); + /** \brief Track using PSA with rotation*/ + double Track(IplImage* img); - virtual void Compensate(double *x, double *y); + virtual void Compensate(double* x, double* y); }; -} // namespace alvar +} // namespace alvar #endif - - diff --git a/ar_track_alvar/include/ar_track_alvar/TrackerStat.h b/ar_track_alvar/include/ar_track_alvar/TrackerStat.h index 53d54d0..fba855a 100644 --- a/ar_track_alvar/include/ar_track_alvar/TrackerStat.h +++ b/ar_track_alvar/include/ar_track_alvar/TrackerStat.h @@ -34,51 +34,55 @@ * \brief This file implements a statistical tracker. */ -namespace alvar { - +namespace alvar +{ /** - * \brief \e TrackerStat deduces the optical flow based on tracked features using Seppo Valli's statistical tracking. + * \brief \e TrackerStat deduces the optical flow based on tracked features + * using Seppo Valli's statistical tracking. 
*/ -class ALVAR_EXPORT TrackerStat : public Tracker { +class ALVAR_EXPORT TrackerStat : public Tracker +{ protected: - TrackerFeatures f; - HistogramSubpixel hist; + TrackerFeatures f; + HistogramSubpixel hist; + public: - /** \brief \e Track result x-translation in pixels */ - double xd; - /** \brief \e Track result y-translation in pixels */ - double yd; - /** \brief Constructor */ - TrackerStat(int binsize=8); - /** \brief Reset */ - void Reset(); - /** - * \brief Translation tracker (the simplest possible) - */ - double Track(IplImage *img); - virtual void Compensate(double *x, double *y); + /** \brief \e Track result x-translation in pixels */ + double xd; + /** \brief \e Track result y-translation in pixels */ + double yd; + /** \brief Constructor */ + TrackerStat(int binsize = 8); + /** \brief Reset */ + void Reset(); + /** + * \brief Translation tracker (the simplest possible) + */ + double Track(IplImage* img); + virtual void Compensate(double* x, double* y); }; /** - * \brief TrackerStatRot implements a slightly extended version of TrackerStat which can also detect sideways rotation. + * \brief TrackerStatRot implements a slightly extended version of TrackerStat + * which can also detect sideways rotation. */ -class ALVAR_EXPORT TrackerStatRot : public TrackerStat { - int x_res, y_res; - HistogramSubpixel hist_rot; +class ALVAR_EXPORT TrackerStatRot : public TrackerStat +{ + int x_res, y_res; + HistogramSubpixel hist_rot; + public: - /** \brief \e Track result rotation in degrees */ - double rotd; - /** \brief Constructor */ - TrackerStatRot(int binsize=8, int binsize_rot=3); - /** - * \brief Translation + rotation tracker - */ - double Track(IplImage *img); - virtual void Compensate(double *x, double *y); + /** \brief \e Track result rotation in degrees */ + double rotd; + /** \brief Constructor */ + TrackerStatRot(int binsize = 8, int binsize_rot = 3); + /** + * \brief Translation + rotation tracker + */ + double Track(IplImage* img); + virtual void Compensate(double* x, double* y); }; -} // namespace alvar +} // namespace alvar #endif - - diff --git a/ar_track_alvar/include/ar_track_alvar/TrifocalTensor.h b/ar_track_alvar/include/ar_track_alvar/TrifocalTensor.h index 5429440..5f16aad 100644 --- a/ar_track_alvar/include/ar_track_alvar/TrifocalTensor.h +++ b/ar_track_alvar/include/ar_track_alvar/TrifocalTensor.h @@ -34,118 +34,122 @@ * \brief This file implements a trifocal tensor. */ -namespace alvar { +namespace alvar +{ +/** + * \brief Trifocal tensor works for three images like a fundamental matrix + * works for two images. + * + * Given three camera poses P0, P1, P2 and a 3D point X, we can calculate a + * trifocal tensor T(P0, P1, P2). The tensor relates projections of X in P0, + * P1 and P2 in such a way that when any two projections are known the third + * projection can be calculated. + * + * This implementation of trifocal tensor assumes that the camera poses + * P0, P1 and P2 are known. When projections of X in P0 and P1 are known + * the projection in P2 can be computed using the tensor. + * + * The current implementation cannot be used to directly compute the tensor + * from point correspondences alone. The implementation can be used for + * example for optimizing three camera poses when point correspondences are + * known in the three images by minimizing the trifocal 'projection error' + * computed by the \e projectError method. + * + * \code + * Pose P1, P2; // P0 is identity pose.
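+ * // P1 and P2 are assumed to hold the known second and third camera poses.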
+ * CvPoint2D64f proj0, proj1, proj2; // A 3D point projected with poses P0, P1 + * and P2. + * + * TrifocalTensor T(P1, P2); + * CvPoint2D64f test2; + * T.project(proj0, proj1, test2); + * assert(proj2.x == test2.x); + * assert(proj2.y == test2.y); + * \endcode + */ +class ALVAR_EXPORT TrifocalTensor +{ +private: + double T[3][3][3]; + double projectAxis(const CvPoint2D64f& p0, const CvPoint2D64f& p1, int l); + +public: + TrifocalTensor(); - /** - * \brief Trifocal tensor works for three images like a fundamental matrix - * works for two images. - * - * Given three camera poses P0, P1, P2 and a 3D point X, we can calculate a - * trifocal tensor T(P0, P1, P2). The tensor relates projections of X in P0, - * P1 and P2 in such a way that when any two projections are known the third - * projection can be calculated. + /** \brief Constructs a tensor from identity pose and two other known poses. + * See \e computeTensor. * - * This implementation of trifocal tensor assumes that the camera poses - * P0, P1 and P2 are known. When projections of X in P0 and P1 are known - * the projection in P2 can be computed using the tensor. + * \param P1 The second pose relative to the first pose. + * \param P2 The third pose relative to the first pose. + */ + TrifocalTensor(const Pose& P1, const Pose& P2); + + /** \brief Constructs a tensor from three known poses. + * See \e computeTensor. * - * The current implementation cannot be used to directly compute the tensor - * from point correspondencies alone. The implementation can be used for - * example for optimizing three camera poses when point correspondences are - * known in the three images by minimizing the trifocal 'projection error' - * computed by \e projectError -method. + * \param P0 The first camera pose. + * \param P1 The second pose relative to the first pose. + * \param P2 The third pose relative to the first pose. + */ + TrifocalTensor(const Pose& P0, const Pose& P1, const Pose& P2); + + ~TrifocalTensor(); + + /** \brief Initializes the tensor from identity pose and two other known + * poses. * - * \code - * Pose P1, P2; // P0 is identity pose. - * CvPoint2D64f proj0, proj1, proj2; // A 3D point projected with poses P0, P1 and P2. + * The first pose is identity and the two other poses are relative + * translations/rotation between the first and the second pose and + * between the first and the third pose. * - * TrifocalTensor T(P1, P2); - * CvPoint2D64f test2; - * T.project(proj0, proj1, test2); - * assert(proj2.x == test2.x); - * assert(proj2.y == test2.y); - * assert(proj2.z == test2.z); - * \endcode + * \param P1 The second pose relative to the first pose. + * \param P2 The third pose relative to the first pose. */ - class ALVAR_EXPORT TrifocalTensor { - private: - double T[3][3][3]; - double projectAxis(const CvPoint2D64f &p0, const CvPoint2D64f &p1, int l); - - public: - TrifocalTensor(); + void computeTensor(const Pose& P1, const Pose& P2); - /** \brief Constructs a tensor from identity pose and two other known poses. - * See \e computeTensor. - * - * \param P1 The second pose relative to the first pose. - * \param P2 The third pose relative to the first pose. - */ - TrifocalTensor(const Pose &P1, const Pose &P2); + /** \brief Initializes the tensor from three known poses. + * + * \param P0 The first camera pose. + * \param P1 The second pose relative to the first pose. + * \param P2 The third pose relative to the first pose.
+ */ + void computeTensor(const Pose& P0, const Pose& P1, const Pose& P2); - /** \brief Constructs a tensor from three known poses. - * See \e computeTensor. - * - * \param P0 The first camera pose. - * \param P1 The second pose relative to the first pose. - * \param P2 The third pose relative to the first pose. - */ - TrifocalTensor(const Pose &P0, const Pose &P1, const Pose &P2); + /** \brief Computes the projection of a point in the third pose. + * + * When we have three images, each a projection of a scene from the three + * different camera poses (identity and the two poses that were used to + * initialize the tensor) and a 2D point correspondence between the first + * and the second images, a position in the third image is computed. + * + * \param p0 2D position in an image projected from the first pose. + * \param p1 2D position in an image projected from the second pose. + * \param p2 Computed 2D position in an image projected from the third pose.
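+ * + * For example (sketch; \e T is an initialized tensor and \e obs0, \e obs1 are + * matching observations from the first two images): + * \code + * CvPoint2D64f pred2; + * T.project(obs0, obs1, pred2); + * \endcode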
+ */ + void project(const CvPoint2D64f& p0, const CvPoint2D64f& p1, + CvPoint2D64f& p2); - ~TrifocalTensor(); - - /** \brief Initializes the tensor from identity pose and two other known - * poses. - * - * The first pose is identity and the two other poses are relative - * translations/rotation between the first and the second pose and - * between the first and the third pose. - * - * \param P1 The second pose relative to the first pose. - * \param P2 The third pose relative to the first pose. - */ - void computeTensor(const Pose &P1, const Pose &P2); - - /** \brief Initializes the tensor from three known poses. - * - * \param P0 The first camera pose. - * \param P1 The second pose relative to the first pose. - * \param P2 The third pose relative to the first pose. - */ - void computeTensor(const Pose &P0, const Pose &P1, const Pose &P2); - - /** \brief Computes the projection of a point in the third pose. - * - * When we have three images, each a projection of a scene from the three - * different camera poses (identity and the two poses that were used to - * initialize the tensor) and a 2D point correspondence between the first - * and the second images, a position in the third image is computed. - * - * \param p0 2D position in a image projected from the first pose. - * \param p1 2D position in a image projected from the second pose. - * \param p2 Computed 2D position in a image projected form the third pose. - */ - void project(const CvPoint2D64f &p0, const CvPoint2D64f &p1, CvPoint2D64f &p2); + /** \brief Computes how much three points differ from the tensor. + * + * When we have three images, each a projection of a scene from the three + * different camera poses (identity and the two poses that were used to + * initialize the tensor) and a 2D point correspondence between the first, + * the second, and the third images, we compute how well the three projections + * match with the trifocal tensor. + * + * When the third point lies exactly where the tensor projects the first + * two points, the returned error is zero. + * + * \param p0 2D position in an image projected from the first pose. + * \param p1 2D position in an image projected from the second pose. + * \param p2 2D position in an image projected from the third pose. + * \return Squared projection error. + */ + double projectError(const CvPoint2D64f& p0, const CvPoint2D64f& p1, + const CvPoint2D64f& p2); +}; -} // namespace alvar +} // namespace alvar -#endif // __TRIFOCAL_TENSOR__ +#endif // __TRIFOCAL_TENSOR__ diff --git a/ar_track_alvar/include/ar_track_alvar/Uncopyable.h b/ar_track_alvar/include/ar_track_alvar/Uncopyable.h index ed708c0..2649df7 100644 --- a/ar_track_alvar/include/ar_track_alvar/Uncopyable.h +++ b/ar_track_alvar/include/ar_track_alvar/Uncopyable.h @@ -32,8 +32,8 @@ #include "Alvar.h" -namespace alvar { - +namespace alvar +{ /** * \brief Uncopyable for preventing object copies. * @@ -43,32 +43,32 @@ namespace alvar { class ALVAR_EXPORT Uncopyable { protected: - /** - * \brief Constructor. - */ - Uncopyable() - { - } + /** + * \brief Constructor. + */ + Uncopyable() + { + } - /** - * \brief Destructor. - */ - ~Uncopyable() - { - } + /** + * \brief Destructor. + */ + ~Uncopyable() + { + } private: - /** - * \brief Copy constructor. - */ - Uncopyable(const Uncopyable &uncopyable); + /** + * \brief Copy constructor. + */ + Uncopyable(const Uncopyable& uncopyable); - /** - * \brief Assignment operator. - */ - Uncopyable &operator=(const Uncopyable &uncopyable); + /** + * \brief Assignment operator. + */ + Uncopyable& operator=(const Uncopyable& uncopyable); }; -} // namespace alvar +} // namespace alvar #endif diff --git a/ar_track_alvar/include/ar_track_alvar/UnscentedKalman.h b/ar_track_alvar/include/ar_track_alvar/UnscentedKalman.h index 9e953ff..010032c 100644 --- a/ar_track_alvar/include/ar_track_alvar/UnscentedKalman.h +++ b/ar_track_alvar/include/ar_track_alvar/UnscentedKalman.h @@ -33,257 +33,267 @@ * \brief This file implements an unscented Kalman filter. */ -namespace alvar { +namespace alvar +{ +class UnscentedProcess; +class UnscentedObservation; - class UnscentedProcess; - class UnscentedObservation; +/** + * \brief Implementation of unscented kalman filter (UKF) for filtering + * non-linear processes. + * + * See http://www.cs.unc.edu/~welch/kalman/media/pdf/Julier1997_SPIE_KF.pdf + * for more details about UKF. + * + * The UKF estimates a process state (represented by a vector) using + * observations of the process. Observations are some derivate of the process + * state as usually the process state cannot be directly observed. + * + * \e UnscentedProcess models the process by predicting the next filter state + * based on the current filter state. + * + * \e UnscentedObservation models the observation by predicting observation + * results based on the current filter state. + * + * UnscentedKalman holds the estimated process state vector and its covariance + * matrix. The new process state can be estimated using \e predict and \e update + * methods. + * + * The current implementation does not separate process noise elements from + * the process state vector.
It is therefore the responsibility of the user + * to include noise terms into process state and state covariance. + * + * \code + * class MyUnscentedProcess : public UnscentedProcess { + * void f(CvMat *state) { // compute new state } + * CvMat *getProcessNoise() { return _noise; } + * } myProcess; + * + * class MyUnscentedObservation : public UnscentedObservation { + * void h(CvMat *z, CvMat *state) { // compute measurement vector z from state } + * CvMat *getObservation() { return _obs; } + * CvMat *getObservationNoise() { return _noise; } + * } myObservation; + * + * int state_n = NUMBER_OF_ELEMENTS_IN_PROCESS_STATE_VECTOR; + * int obs_n = NUMBER_OF_ELEMENTS_IN_PROCESS_OBSERVATION_VECTOR; + * int state_k = NUMBER_OF_PROCESS_NOISE_ELEMENTS; //TODO: Not supported at + * the moment. + * + * UnscentedKalman ukf(state_n, obs_n, state_k); + * initializeState(ukf.getState(), ukf.getStateCovariance()); + * ukf.initialize(); + * + * while (1) { + * ukf.predict(&myProcess); + * // measure new observation. + * ukf.update(&myObservation); + * CvMat *state = ukf.getState(); + * // unpack state information from the state vector and do something with it. + * } + * + * \endcode + */ +class ALVAR_EXPORT UnscentedKalman +{ +private: + int state_n; + int state_k; + int obs_n; + int sigma_n; + bool sigmasUpdated; + double lambda, lambda2; + + CvMat* state; + CvMat* stateCovariance; + CvMat* sqrtStateCovariance; + CvMat* stateD; + CvMat* stateU; + CvMat* stateV; + CvMat* stateTmp; + CvMat* stateDiff; - CvMat *predObs; - CvMat *predObsCovariance; - CvMat *invPredObsCovariance; - CvMat *predObsDiff; - - CvMat *statePredObsCrossCorrelation; - CvMat *kalmanGain; - CvMat *kalmanTmp; - - CvMat **sigma_state; - CvMat **sigma_predObs; - - // possess state mean and co-variance (as a list of sigma points). - // generate sigma points from state mean vector and co-variance matrix. - // compute state mean vector and co-variance matrix from sigma points. - - // predict: - // - map sigma points thru process model f. - - // update: - // - map sigma points thru h. - // - from current sigma points and sigma observations: - // - compute state estimate x and co-variance P. - // - compute predicted observation z and innocation co-variance Z - // - compute cross correlation XZ - // - compute new state mean and co-variance. - // - generate new sigma points. - public: - - /** \brief Initializes Unscented Kalman filter. - * - * Initializes Unscented Kalman filter. The state vector returned by \e getState - * and state covariance matrix returned by \e getStateCovariance should be - * initialized before using the filter. - * - * Separate state noise vector is currently unsupported. The user should include - * noise terms in the state vector directly. Set the noise mean into state vector - * and noise variance into state covariance matrix. - * - * \param state_n The number of elements in process state vector. - * \param obs_n The number of elements in observation vector. + CvMat* predObs; + CvMat* predObsCovariance; + CvMat* invPredObsCovariance; + CvMat* predObsDiff; + + CvMat* statePredObsCrossCorrelation; + CvMat* kalmanGain; + CvMat* kalmanTmp; + + CvMat** sigma_state; + CvMat** sigma_predObs; + + // possess state mean and co-variance (as a list of sigma points). + // generate sigma points from state mean vector and co-variance matrix. + // compute state mean vector and co-variance matrix from sigma points. + + // predict: + // - map sigma points thru process model f. + + // update: + // - map sigma points thru h. + // - from current sigma points and sigma observations: + // - compute state estimate x and co-variance P. + // - compute predicted observation z and innovation co-variance Z + // - compute cross correlation XZ + // - compute new state mean and co-variance. + // - generate new sigma points. +public: + /** \brief Initializes Unscented Kalman filter. * + * Initializes Unscented Kalman filter. The state vector returned by \e + * getState and state covariance matrix returned by \e getStateCovariance + * should be initialized before using the filter. + * + * Separate state noise vector is currently unsupported. The user should + * include noise terms in the state vector directly. Set the noise mean into + * state vector and noise variance into state covariance matrix. + * + * \param state_n The number of elements in process state vector. + * \param obs_n The number of elements in observation vector.
+ * \param state_k The number of noise elements used in the process model. + * TODO: This is currently unsupported. + * \param alpha Spread of sigma points. + * \param beta Prior knowledge about the distribution (2 for Gaussian). + */ + UnscentedKalman(int state_n, int obs_n, int state_k = 0, double alpha = 0.001, + double beta = 2.0); + ~UnscentedKalman(); + + /** \brief Returns the process state vector. * + * The returned state vector contains the current state of the process. + * The returned vector may be modified if the current process state is + * known, for example in initialization phase. If the vector is modified, + * \e initialize method must be called before calling either predict or + * update methods. * + * \return A vector of state_n elements. + */ + CvMat* getState() + { + return state; + } + + /** \brief Returns the process state covariance matrix. * + * The returned matrix contains the current state covariance. The matrix + * may be modified if the covariance is known, for example in initialization + * phase. If the matrix is modified, \e initialize method must be called + * before calling either predict or update methods. * + * \return state_n by state_n covariance matrix. + */ + CvMat* getStateCovariance() + { + return stateCovariance; + } + + /** \brief (Re-)initialize UKF internal state. * + * Must be called before predict/update whenever state or state co-variance + * are changed. + */ + void initialize(); + + /** \brief Updates the state by predicting. * + * Updates the process state by predicting new state from the current state. + * Normally each predict call is followed with a call to update method. * + * \param process_model The model implementation that is used to predict the + * next state.
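+ * + * Typical cycle (sketch; \e ukf, \e myProcess and \e myObservation as in the + * class-level example above): + * \code + * ukf.predict(&myProcess); + * // ... acquire a new measurement into myObservation ... + * ukf.update(&myObservation); + * \endcode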
+ */ + void predict(UnscentedProcess* process_model); + + /** \brief Updates the state by an observation. * + * Updates the process state by a measurement that indirectly observed the + * correct process state. The observation implementation needs to hold the + * current measurement data and implement a transformation from process state + * into measurement (the \e UnscentedObservation::h method). * + * \param observation The observation implementation that is used to update + * the current state. */ - class ALVAR_EXPORT UnscentedKalman { - private: - int state_n; - int state_k; - int obs_n; - int sigma_n; - bool sigmasUpdated; - double lambda, lambda2; + void update(UnscentedObservation* observation); +}; - CvMat *state; - CvMat *stateCovariance; - CvMat *sqrtStateCovariance; - CvMat *stateD; - CvMat *stateU; - CvMat *stateV; - CvMat *stateTmp; - CvMat *stateDiff; - - CvMat *predObs; - CvMat *predObsCovariance; - CvMat *invPredObsCovariance; - CvMat *predObsDiff; - - CvMat *statePredObsCrossCorrelation; - CvMat *kalmanGain; - CvMat *kalmanTmp; - - CvMat **sigma_state; - CvMat **sigma_predObs; - - // possess state mean and co-variance (as a list of sigma points). - // generate sigma points from state mean vector and co-variance matrix. - // compute state mean vector and co-variance matrix from sigma points. - - // predict: - // - map sigma points thru process model f. - - // update: - // - map sigma points thru h. - // - from current sigma points and sigma observations: - // - compute state estimate x and co-variance P. - // - compute predicted observation z and innocation co-variance Z - // - compute cross correlation XZ - // - compute new state mean and co-variance. - // - generate new sigma points. - public: - - /** \brief Initializes Unscented Kalman filter. - * - * Initializes Unscented Kalman filter. The state vector returned by \e getState - * and state covariance matrix returned by \e getStateCovariance should be - * initialized before using the filter. - * - * Separate state noise vector is currently unsupported. The user should include - * noise terms in the state vector directly. Set the noise mean into state vector - * and noise variance into state covariance matrix. - * - * \param state_n The number of elements in process state vector. - * \param obs_n The number of elements in observation vector. - * \param state_k The number of noise elements used in the process model. - * TODO: This is currently unsupported. - * \param alpha Spread of sigma points. - * \param beta Prior knowlegde about the distribution (2 for Gaussian). - */ - UnscentedKalman(int state_n, int obs_n, int state_k = 0, double alpha = 0.001, double beta = 2.0); - ~UnscentedKalman(); - - /** \brief Returns the process state vector. - * - * The returned state vector contains the current state of the process. - * The returned vector may be modified if the current process state is - * known, for example in initialization phase. If the vector is modified, - * \e initialize method must be called before calling either predict or - * update methods. - * - * \return A vector of state_n elements. - */ - CvMat *getState() { return state; } - - /** \brief Returns the process state covariance matrix. - * - * The returned matrix contains the current state covariance. The matrix - * may be modified if the covariance is known, for example in initialization - * phase. If the matrix is modified, \e initialize method must be called - * before calling either predict of update methods. - * - * \return state_n by state_n covariance matrix. - */ - CvMat *getStateCovariance() { return stateCovariance; } - - /** \brief (Re-)initialize UKF internal state.
- * - * Must be called before predict/update when ever state or state co-variance - * are changed. - */ - void initialize(); - - /** \brief Updated the state by predicting. - * - * Updates the process state by predicting new state from the current state. - * Normally each predict call is followed with a call to update method. - * - * \param process_model The model implementation that is used to predict the - * next state. - */ - void predict(UnscentedProcess *process_model); - - /** \brief Updates the state by an observation. - * - * Updates the process state by a measurement that indirectly observed the - * correct process state. The observation implementation needs to hold the - * current measurement data and implement a transformation from process state - * into measurement (the \e UnscentedObservation::h method). - * - * \param observation The observation implementation the is used to update - * the current state. - */ - void update(UnscentedObservation *observation); - }; - - /** - * \brief Process model for an unscented kalman filter. +/** + * \brief Process model for an unscented kalman filter. + * + * Implementing class needs to allocate a noise matrix of correct size. + */ +class ALVAR_EXPORT UnscentedProcess +{ +public: + /** \brief process model: state+1 = f(state) + * + * Model the process by computing an estimate how the process changes + * when one timestep is taken. * - * Implementing class needs to allocate a noise matrix of correct size. + * \param state state_n size vector; The current state in input and the next + * state estimate in output. */ - class ALVAR_EXPORT UnscentedProcess { - public: - /** \brief process model: state+1 = f(state) - * - * Model the process by computing an estimate how the process changes - * when one timestep is taken. - * - * \param state state_n size vector; The current state in input and the next - * state estimate in output. - */ - virtual void f(CvMat *state) = 0; + virtual void f(CvMat* state) = 0; - /** \brief Returns the process noise covariance. - * - * The returned matrix will be added to the current state covariance matrix, - * increasing the uncertainty of the current state. The matrix should reflect - * all unknown factors of the process which are not taken into account by the - * state estimation method \e f. - * - * \return state_n by state_n size matrix; or NULL for no additional noise. - */ - virtual CvMat *getProcessNoise() = 0; - }; + /** \brief Returns the process noise covariance. + * + * The returned matrix will be added to the current state covariance matrix, + * increasing the uncertainty of the current state. The matrix should reflect + * all unknown factors of the process which are not taken into account by the + * state estimation method \e f. + * + * \return state_n by state_n size matrix; or NULL for no additional noise. + */ + virtual CvMat* getProcessNoise() = 0; +}; - /** - * \brief Observation model for an unscented kalman filter. +/** + * \brief Observation model for an unscented kalman filter. + * + * The implementation needs to allocate correct size measurement vector and + * noise matrix and to implement a transformation from process state into a + * measurement. + */ +class ALVAR_EXPORT UnscentedObservation +{ +public: + /** \brief observation model: z = h(state) * - * The implementation needs to allocate correct size measurement vector and - * noise matrix and to implement a transformation from process state into a - * measurement. 
+ * Computes an estimated measurement vector from the current state estimate. + * + * \param z obs_n size vector; The estimated measurement. + * \param state state_n size vector; The current state. */ - virtual void h(CvMat *z, CvMat *state) = 0; + virtual void h(CvMat* z, CvMat* state) = 0; - /** \brief Returns the current measurement vector. - * - * The returned vector should contain the latest measurement values. - * In the UKF update phase the process state will be modified in such a - * way to make the difference between estimated measurement (from method \e h) - * and the returned real measurement smaller. - * - * \return obs_n size vector containing the current measured values. - */ - virtual CvMat *getObservation() = 0; + /** \brief Returns the current measurement vector. + * + * The returned vector should contain the latest measurement values. + * In the UKF update phase the process state will be modified in such a + * way to make the difference between estimated measurement (from method \e h) + * and the returned real measurement smaller. + * + * \return obs_n size vector containing the current measured values. + */ + virtual CvMat* getObservation() = 0; - /** \brief Returns the observation noise covariance matrix. - * - * The returned matrix will be added to the current observation covariance - * matrix increasing the uncertainty of measurements. The matrix should - * reflect the amount of noise in the measurement vector returned by \e - * getObservation method. - * - * \return obs_n by obs_b matrix containing observation noise covariance; or - * NULL for no additional noise. - */ - virtual CvMat *getObservationNoise() = 0; - }; + /** \brief Returns the observation noise covariance matrix. + * + * The returned matrix will be added to the current observation covariance + * matrix increasing the uncertainty of measurements. The matrix should + * reflect the amount of noise in the measurement vector returned by \e + * getObservation method. + * + * \return obs_n by obs_n matrix containing observation noise covariance; or + * NULL for no additional noise. + */ + virtual CvMat* getObservationNoise() = 0; +}; -} // namespace alvar +} // namespace alvar -#endif // __UNSCENTED_KALMAN__ +#endif // __UNSCENTED_KALMAN__ diff --git a/ar_track_alvar/include/ar_track_alvar/Util.h b/ar_track_alvar/include/ar_track_alvar/Util.h index 9dcb0fc..358020d 100644 --- a/ar_track_alvar/include/ar_track_alvar/Util.h +++ b/ar_track_alvar/include/ar_track_alvar/Util.h @@ -37,270 +37,284 @@ #include #include #include -#include -#include -#include -#include -#include "opencv2/core/types_c.h" -#include //for abs #include -#include //Compatibility with OpenCV 3.x - -namespace alvar { +#include +#include //for abs #include +namespace alvar +{ const double PI = 3.14159265; /** -* \brief Returns the sign of a number. -*/ -template inline -int ALVAR_EXPORT Sign(const C& v) + * \brief Returns the sign of a number. + */ +template <class C> +inline int ALVAR_EXPORT Sign(const C& v) { - return (v<0?-1:1); + return (v < 0 ? -1 : 1); } /** -* \brief Converts an angle from radians to degrees.
+ */ +template <class C> +inline double ALVAR_EXPORT Rad2Deg(const C& v) { - return v*(180/PI); + return v * (180 / PI); } /** -* \brief Converts an angle from degrees to radians. -*/ -template inline -double ALVAR_EXPORT Deg2Rad(const C& v) + * \brief Converts an angle from degrees to radians. + */ +template <class C> +inline double ALVAR_EXPORT Deg2Rad(const C& v) { - return v*(PI/180); + return v * (PI / 180); } /** - * \brief Simple \e Point class meant to be inherited from OpenCV point-classes. For example: Point p + * \brief Simple \e Point class meant to be inherited from OpenCV point-classes. + * For example: Point p */ -template +template <class C, class D = int> struct ALVAR_EXPORT Point : public C { - /** - * \brief Additional value can be related to the point. - */ - D val; - - Point(int vx=0, int vy=0) - { - C::x = vx; - C::y = vy; - } - Point(double vx, double vy) - { - C::x = vx; - C::y = vy; - } + /** + * \brief Additional value can be related to the point. + */ + D val; + + Point(int vx = 0, int vy = 0) + { + C::x = vx; + C::y = vy; + } + Point(double vx, double vy) + { + C::x = vx; + C::y = vy; + } }; -/** - * \brief The default integer point type. -*/ +/** + * \brief The default integer point type. + */ typedef ALVAR_EXPORT Point PointInt; /** - * \brief The default double point type. -*/ -typedef ALVAR_EXPORT Point PointDouble; - -/** \brief Returns the squared distance of two points. - * \param p1 First point. - * \param p2 Second point. - * \return Squared distance. -*/ -template -double PointSquaredDistance(PointType p1, PointType p2) { - return ((p1.x-p2.x)*(p1.x-p2.x)) + - ((p1.y-p2.y)*(p1.y-p2.y)); -} + * \brief The default double point type. + */ +typedef ALVAR_EXPORT Point PointDouble; +/** \brief Returns the squared distance of two points. + * \param p1 First point. + * \param p2 Second point. + * \return Squared distance. + */ +template <class PointType> +double PointSquaredDistance(PointType p1, PointType p2) +{ + return ((p1.x - p2.x) * (p1.x - p2.x)) + ((p1.y - p2.y) * (p1.y - p2.y)); +} -//ttesis start +// ttesis start +/** + * \brief Computes dot product AB.BC + * \param A,B and C points defining lines (line segments) AB and BC + */ +int ALVAR_EXPORT dot(const cv::Point& A, const cv::Point& B, + const cv::Point& C); -/** - * \brief Computes dot product AB.BC - * \param A,B and C points defining lines (line segments) AB and BC - */ -int ALVAR_EXPORT dot(CvPoint *A, CvPoint *B, CvPoint *C); +/** + * \brief Computes the cross product AB x AC + * \param A,B and C points defining lines (line segments) AB and AC + */ +int ALVAR_EXPORT cross(const cv::Point& A, const cv::Point& B, + const cv::Point& C); -/** - * \brief Computes the cross product AB x AC - * \param A,B and C points defining lines (line segments) AB and AC - * \param - */ -int ALVAR_EXPORT cross(CvPoint *A,CvPoint *B, CvPoint *C); +/** + * \brief Compute the distance from A to B + * \param A and B points + */ +double ALVAR_EXPORT distance(const cv::Point& A, const cv::Point& B); +/** + * \brief Computes the distance from point C to line (segment) AB. + * \param isSegment If isSegment is true, AB is a segment, not a line. + * \param C point + * \param A and B points defining line (segment) AB + */ +double ALVAR_EXPORT linePointDist(const cv::Point& A, const cv::Point& B, + const cv::Point& C, bool isSegment); -/** - * \brief Compute the distance from A to B - * \param A and B points - */ -double ALVAR_EXPORT distance(CvPoint *A,CvPoint *B); +/** + * \brief Computes the angle between lines AB and CD + * \param isDirectionDependent If isDirectionDependent = 1, angle depends on + * the order of the points. Otherwise returns smaller angle. + * \param A start point of first line + * \param B end point of first line + * \param C start point of second line + * \param D end point of second line + */ +double ALVAR_EXPORT angle(const cv::Point& A, const cv::Point& B, + const cv::Point& C, const cv::Point& D, + int isDirectionDependent); -/** - * \brief Computes the distance from point C to line (segment) AB. - * \param isSegment If isSegment is true, AB is a segment, not a line. - * \param C point - * \param A abd B points defining line (segment) AB - */ -double ALVAR_EXPORT linePointDist(CvPoint *A,CvPoint *B,CvPoint *C, bool isSegment); +/** + * \brief Calculates minimum distance from Point C to Polygon whose points are + * in list PointList + * \return Distance + * \param index index of point A in pointlist, where A is the starting point + * of the closest polygon segment + * \param isClosedPolygon is true if polygon is closed (segment of the first + * and last point is also checked) + */ +double ALVAR_EXPORT polyLinePointDist(const std::vector& points, + const cv::Point& C, int* index, + int isClosedPolygon); +// ttesis end -/** - * \brief Computes the angle between lines AB and CD - * \param isDirectionDependent If isDirectionDependent = 1, angle depends on the order of the points. Otherwise returns smaller angle. - * \param A start point of first line - * \param B end point of first line - * \param C start point of second line - * \param D end point of second line - */ -double ALVAR_EXPORT angle(CvPoint *A,CvPoint *B, CvPoint *C,CvPoint *D, int isDirectionDependent); - -/** - * \brief Calculates minimum distance from Point C to Polygon whose points are in list PointList - * \brief Returns distance - * \param index index of point A in pointlist, where A is the starting point of the closest polygon segment - * \param isClosedPolygon is true if polygon is closed (segment of the first and last point is also checked) - */ -double ALVAR_EXPORT polyLinePointDist(CvPoint *PointList, int nPnts,CvPoint *C, int *index, int isClosedPolygon); - -//ttesis end /** - * \brief Uses OpenCV routine to fit ellipse to a vector of points. - * \param points Vector of points on the ellipse edge.
- * \param ellipse_box OpenCV struct for the fitted ellipse. +/** + * \brief Output OpenCV matrix for debug purposes. */ -void ALVAR_EXPORT FitCVEllipse(const std::vector &points, cv::RotatedRect& ellipse_box); - -int ALVAR_EXPORT exp_filt2(std::vector &v,std:: vector &ret, bool clamp); - -/** - * \brief Calculates the difference between the consecutive vector elements. - * \param v Source elements. - * \param ret The difference vector. This is cleared and then resized. - * \return The number of elements. - */ -template inline -int ALVAR_EXPORT diff(const std::vector &v, std::vector &ret) -{ - ret.clear(); - if (v.size() == 1) { - ret.push_back(0); - } else if (v.size() > 1) { - ret.push_back(v.at(1)-v.at(0)); - for(size_t i = 1; i < v.size(); ++i) - { - ret.push_back(v.at(i)-v.at(i-1)); - } - } - return int(ret.size()); -} +void ALVAR_EXPORT out_matrix(const cv::Mat& m, const char* name); -/** - * \brief Finds zero crossings of given vector elements (sequence). - * \param v Sequence of numbers from where the zero crossings are found. - * \param corners Resulting index list of zero crossings. - * \param offs - * \return Number of zero crossings found. - */ -int ALVAR_EXPORT find_zero_crossings(const std::vector& v, std::vector &corners, int offs = 20); - -/** - * \brief Output OpenCV matrix for debug purposes. - */ -void ALVAR_EXPORT out_matrix(const CvMat *m, const char *name); - -/** - * \brief Limits a number to between two values. - * \param val Input value. - * \param min_val Minimum value for the result. - * \param max_val Maximum value for the result. - * \return Resulting value that is between \e min_val and \e max_val. -*/ +/** + * \brief Limits a number to between two values. + * \param val Input value. + * \param min_val Minimum value for the result. + * \param max_val Maximum value for the result. + * \return Resulting value that is between \e min_val and \e max_val. + */ double ALVAR_EXPORT Limit(double val, double min_val, double max_val); -/** +/** * \brief Class for N-dimensional index to be used e.g. with STL maps * * The idea is that if you want to sort N-dimensional pointers (e.g. 
* when they are stored in STL maps) it is enough to have the 'operator<' - * working, instead of needing to calculate something like i=z*x_res*y_res + y*x_res + x; + * working, instead of needing to calculate something like i=z*x_res*y_res + + * y*x_res + x; */ -struct ALVAR_EXPORT Index { - /** \brief The indices for each dimension are stored in \e val (last being the most significant) */ - std::vector val; - /** \brief Constructor for 1D index */ - Index(int a); - /** \brief Constructor for 2D index */ - Index(int a, int b); - /** \brief Constructor for 3D index */ - Index(int a, int b, int c); - /** \brief Operator used for sorting the multidimensional indices (last dimension being the most significant) */ - bool operator<(const Index &index) const; +struct ALVAR_EXPORT Index +{ + /** \brief The indices for each dimension are stored in \e val (last being the + * most significant) */ + std::vector val; + /** \brief Constructor for 1D index */ + Index(int a); + /** \brief Constructor for 2D index */ + Index(int a, int b); + /** \brief Constructor for 3D index */ + Index(int a, int b, int c); + /** \brief Operator used for sorting the multidimensional indices (last + * dimension being the most significant) */ + bool operator<(const Index& index) const; }; -/** +/** * \brief Class for N-dimensional Histograms */ -class ALVAR_EXPORT Histogram { +class ALVAR_EXPORT Histogram +{ protected: - std::map bins; - std::vector dim_binsize; - int DimIndex(int dim, double val); - double DimVal(int dim, int index); + std::map bins; + std::vector dim_binsize; + int DimIndex(int dim, double val); + double DimVal(int dim, int index); + public: - /** \brief Add dimension with a binsize - */ - void AddDimension(int binsize); - /** \brief Clear the histogram */ - void Clear(); - /** \brief Increase the histogram for given dimensions */ - void Inc(double dim0, double dim1=0, double dim2=0); - /** \brief Get the maximum from the histogram - * This returns the value in the middle of the 'bin' instead of bin-number - */ - int GetMax(double *dim0, double *dim1=0, double *dim2=0); + /** \brief Add dimension with a binsize + */ + void AddDimension(int binsize); + /** \brief Clear the histogram */ + void Clear(); + /** \brief Increase the histogram for given dimensions */ + void Inc(double dim0, double dim1 = 0, double dim2 = 0); + /** \brief Get the maximum from the histogram + * This returns the value in the middle of the 'bin' instead of bin-number + */ + int GetMax(double* dim0, double* dim1 = 0, double* dim2 = 0); }; -/** - * \brief N-dimensional Histograms calculating also the subpixel average for max bin +/** + * \brief N-dimensional Histograms calculating also the subpixel average for max + * bin */ -class ALVAR_EXPORT HistogramSubpixel : public Histogram { +class ALVAR_EXPORT HistogramSubpixel : public Histogram +{ protected: - std::map acc_dim0; - std::map acc_dim1; - std::map acc_dim2; + std::map acc_dim0; + std::map acc_dim1; + std::map acc_dim2; + public: - /** \brief Clear the histogram */ - void Clear(); - /** \brief Increase the histogram for given dimensions */ - void Inc(double dim0, double dim1=0, double dim2=0); - /** \brief Get the maximum from the histogram - * This finds the maximum bin(s) and averages the original - * values contained there to achieve subpixel accuracy. 
 #if (_MSC_VER >= 1400)
-    inline void STRCPY(char *to, rsize_t size, const char *src) {
-        strcpy_s(to,size,src);
-    }
+inline void STRCPY(char* to, rsize_t size, const char* src)
+{
+  strcpy_s(to, size, src);
+}
 #else
-    inline void STRCPY(char *to, size_t size, const char *src) {
-        strncpy(to,src,size-1);
-    }
+inline void STRCPY(char* to, size_t size, const char* src)
+{
+  strncpy(to, src, size - 1);
+}
 #endif
 #ifdef min
@@ -326,7 +340,7 @@ class ALVAR_EXPORT HistogramSubpixel : public Histogram {
  *   return true;
  * }
  * \endcode
- * In your classes \e Serialize -method you can use the overloaded
+ * In your classes \e Serialize -method you can use the overloaded
  * \e Serialize method of the \e Serialization class to serialize
  * data or data arrays. In addition you can use \e SerializeClass
  * to serialize inner serializable classes.
@@ -349,108 +363,130 @@ class ALVAR_EXPORT HistogramSubpixel : public Histogram {
 * seri>>cam;
 * \endcode
 *
- * See the constructor \e Serialization::Serialization documentation for
+ * See the constructor \e Serialization::Serialization documentation for
 * further use examples.
 */
-class ALVAR_EXPORT Serialization {
+class ALVAR_EXPORT Serialization
+{
 protected:
-    bool input;
-    std::string filename;
-    //std::iostream *stream;
-    std::ios *stream;
-    void *formatter_handle;
-    bool Output();
-    bool Input();
-    bool Descend(const char *id);
-    bool Ascend();
+  bool input;
+  std::string filename;
+  // std::iostream *stream;
+  std::ios* stream;
+  void* formatter_handle;
+  bool Output();
+  bool Input();
+  bool Descend(const char* id);
+  bool Ascend();
+
 public:
-    /** \brief Constructor for serializing to/from specified filename
-     *
-     * \code
-     * Serialization sero("test1.xml");
-     * sero<<cam;
-     * \endcode
-     *
-     * \code
-     * Serialization seri("test1.xml");
-     * seri>>cam;
-     * \endcode
-     *
-     * Note that this is not identical to:
-     * \code
-     * ofstream ofs("test1.xml");
-     * Serialization sero(ofs);
-     * sero<<cam;
-     * \endcode
-     *
-     * \code
-     * ifstream ifs("test1.xml");
-     * Serialization seri(ifs);
-     * seri>>cam;
-     * \endcode
-     *
-     * There are differences with these approaches. When using the constructor
-     * with 'filename', we use the tinyxml Save and Load methods, while with
-     * iostream we use tinyxml operators for << and >> . The prior approach
-     * uses properly indented xml-files with XML declaration <?xml version="1.0" encoding="UTF-8"?>. In the
-     * latter approach the indentations and the XML declaration are left out.
-     * The XML declaration is left out because for some reason tinyxml
-     * doesn't parse it correctly when using operator>> .
-     */
-    Serialization(std::string _filename);
-    /** \brief Constructor for serializing any iostream (e.g. std::stringstream) */
-    Serialization(std::basic_iostream<char> &_stream);
-    /** \brief Constructor for serializing any istream (e.g. std::cin) */
-    Serialization(std::basic_istream<char> &_stream);
-    /** \brief Constructor for serializing any ostream (e.g. std::cout) */
-    Serialization(std::basic_ostream<char> &_stream);
-    /** \brief Destructor */
-    ~Serialization();
-    /** \brief Operator for outputting a serializable class into the defined filename or std::iostream */
-    template <class C>
-    Serialization& operator<<(C &serializable) {
-        input=false;
-        if (!SerializeClass(serializable) || !Output()) {
-            throw(AlvarException("Serialization failure"));
-        }
-        return *this;
-    }
-    /** \brief Operator for reading a serializable class from the defined filename or std::iostream */
-    template <class C>
-    Serialization& operator>>(C &serializable) {
-        input=true;
-        if (!Input() || !SerializeClass(serializable)) {
-            throw(AlvarException("Serialization failure"));
-        }
-        return *this;
-    }
-    /** \brief Method for serializing a serializable class. Used by operators << and >> .
-     *
-     * Note, in the future this should be usable also from your serializable class
-     * for adding nested serializable classes.
-     */
-    template <class C>
-    bool SerializeClass(C &serializable) {
-        std::string s = serializable.SerializeId();
-        if (!Descend(s.c_str()) || !serializable.Serialize(this) || !Ascend()) {
-            return false;
-        }
-        return true;
-    }
-    /** \brief Method for serializing 'int' data element. Used from your serializable class. */
-    bool Serialize(int &data, const std::string &name);
-    /** \brief Method for serializing 'int' data element. Used from your serializable class. */
-    bool Serialize(unsigned short &data, const std::string &name);
-    /** \brief Method for serializing 'int' data element. Used from your serializable class. */
-    bool Serialize(unsigned long &data, const std::string &name);
-    /** \brief Method for serializing 'double' data element. Used from your serializable class. */
-    bool Serialize(double &data, const std::string &name);
-    /** \brief Method for serializing 'std::string' data element. Used from your serializable class. */
-    bool Serialize(std::string &data, const std::string &name);
-    /** \brief Method for serializing 'CvMat' data element. Used from your serializable class. */
-    bool Serialize(CvMat &data, const std::string &name);
-    /** \brief Method for checking if we are inputting or outputting. Can be used from your serializable class. */
-    bool IsInput() { return input; }
+  /** \brief Constructor for serializing to/from specified filename
+   *
+   * \code
+   * Serialization sero("test1.xml");
+   * sero<<cam;
+   * \endcode
+   *
+   * \code
+   * Serialization seri("test1.xml");
+   * seri>>cam;
+   * \endcode
+   *
+   * Note that this is not identical to:
+   * \code
+   * ofstream ofs("test1.xml");
+   * Serialization sero(ofs);
+   * sero<<cam;
+   * \endcode
+   *
+   * \code
+   * ifstream ifs("test1.xml");
+   * Serialization seri(ifs);
+   * seri>>cam;
+   * \endcode
+   *
+   * There are differences with these approaches. When using the constructor
+   * with 'filename', we use the tinyxml Save and Load methods, while with
+   * iostream we use tinyxml operators for << and >> . The prior approach
+   * uses properly indented xml-files with XML declaration <?xml version="1.0" encoding="UTF-8"?>. In the
+   * latter approach the indentations and the XML declaration are left out.
+   * The XML declaration is left out because for some reason tinyxml
+   * doesn't parse it correctly when using operator>> .
+   */
+  Serialization(std::string _filename);
+  /** \brief Constructor for serializing any iostream (e.g. std::stringstream)
+   */
+  Serialization(std::basic_iostream<char>& _stream);
+  /** \brief Constructor for serializing any istream (e.g. std::cin) */
+  Serialization(std::basic_istream<char>& _stream);
+  /** \brief Constructor for serializing any ostream (e.g. std::cout) */
+  Serialization(std::basic_ostream<char>& _stream);
+  /** \brief Destructor */
+  ~Serialization();
+  /** \brief Operator for outputting a serializable class into the defined
+   * filename or std::iostream */
+  template <class C>
+  Serialization& operator<<(C& serializable)
+  {
+    input = false;
+    if (!SerializeClass(serializable) || !Output())
+    {
+      throw(AlvarException("Serialization failure"));
+    }
+    return *this;
+  }
+  /** \brief Operator for reading a serializable class from the defined filename
+   * or std::iostream */
+  template <class C>
+  Serialization& operator>>(C& serializable)
+  {
+    input = true;
+    if (!Input() || !SerializeClass(serializable))
+    {
+      throw(AlvarException("Serialization failure"));
+    }
+    return *this;
+  }
+  /** \brief Method for serializing a serializable class. Used by operators <<
+   * and >> .
+   *
+   * Note, in the future this should be usable also from your serializable class
+   * for adding nested serializable classes.
+   */
+  template <class C>
+  bool SerializeClass(C& serializable)
+  {
+    std::string s = serializable.SerializeId();
+    if (!Descend(s.c_str()) || !serializable.Serialize(this) || !Ascend())
+    {
+      return false;
+    }
+    return true;
+  }
+  /** \brief Method for serializing 'int' data element. Used from your
+   * serializable class. */
+  bool Serialize(int& data, const std::string& name);
+  /** \brief Method for serializing 'int' data element. Used from your
+   * serializable class. */
+  bool Serialize(unsigned short& data, const std::string& name);
+  /** \brief Method for serializing 'int' data element. Used from your
+   * serializable class. */
+  bool Serialize(unsigned long& data, const std::string& name);
+  /** \brief Method for serializing 'double' data element. Used from your
+   * serializable class. */
+  bool Serialize(double& data, const std::string& name);
+  /** \brief Method for serializing 'std::string' data element. Used from your
+   * serializable class. */
+  bool Serialize(std::string& data, const std::string& name);
+  /** \brief Method for serializing 'cv::Mat' data element. Used from your
+   * serializable class. */
+  bool Serialize(cv::Mat& data, const std::string& name);
+  /** \brief Method for checking if we are inputting or outputting. Can be used
+   * from your serializable class.
+   */
+  bool IsInput()
+  {
+    return input;
+  }
 };

-}  // namespace alvar
+}  // namespace alvar

 #endif
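// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): a hypothetical serializable
// type using the interface above. MyData, its fields, and the file name are
// invented for the example.
//
//   class MyData {
//   public:
//     int count;
//     double scale;
//     std::string SerializeId() { return "mydata"; }
//     bool Serialize(alvar::Serialization* ser) {
//       return ser->Serialize(count, "count") && ser->Serialize(scale, "scale");
//     }
//   };
//
//   alvar::Serialization sero("mydata.xml");
//   MyData d;
//   sero << d;   // write; a second Serialization and 'seri >> d' reads it back
// ---------------------------------------------------------------------------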
diff --git a/ar_track_alvar/include/ar_track_alvar/filter/kinect_filtering.h b/ar_track_alvar/include/ar_track_alvar/filter/kinect_filtering.h
index 730377c..ebbc13a 100644
--- a/ar_track_alvar/include/ar_track_alvar/filter/kinect_filtering.h
+++ b/ar_track_alvar/include/ar_track_alvar/filter/kinect_filtering.h
@@ -29,8 +29,8 @@
  */

 /**
- * \file
- *
+ * \file
+ *
  * Library for depth based filtering
  *
  * \author Bhaskara Marthi
@@ -51,7 +51,6 @@
 #include
 #include
 #include
-// #include
 #include
 #include
 #include
 #include
@@ -62,47 +61,44 @@
 namespace ar_track_alvar
 {
-
 typedef pcl::PointXYZRGB ARPoint;
 typedef pcl::PointCloud<ARPoint> ARCloud;

 // Result of plane fit: inliers and the plane equation
 struct PlaneFitResult
 {
-    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
-    PlaneFitResult () : inliers(ARCloud::Ptr(new ARCloud)) {}
+  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
+  PlaneFitResult() : inliers(ARCloud::Ptr(new ARCloud))
+  {
+  }
   ARCloud::Ptr inliers;
   pcl::ModelCoefficients coeffs;
 };

 // Select out a subset of a cloud corresponding to a set of pixel coordinates
-ARCloud::Ptr filterCloud (const ARCloud& cloud,
-    const std::vector<cv::Point, Eigen::aligned_allocator<cv::Point> >& pixels);
+ARCloud::Ptr filterCloud(
+    const ARCloud& cloud,
+    const std::vector<cv::Point, Eigen::aligned_allocator<cv::Point> >& pixels);

 // Wrapper for PCL plane fitting
-PlaneFitResult fitPlane (ARCloud::ConstPtr cloud);
+PlaneFitResult fitPlane(ARCloud::ConstPtr cloud);

-// Given the coefficients of a plane, and two points p1 and p2, we produce a
+// Given the coefficients of a plane, and two points p1 and p2, we produce a
 // quaternion q that sends p2'-p1' to (1,0,0) and n to (0,0,1), where p1' and
-// p2' are the projections of p1 and p2 onto the plane and n is the normal.
+// p2' are the projections of p1 and p2 onto the plane and n is the normal.
 // There's a sign ambiguity here, which is resolved by requiring that the
 // difference p4'-p3' ends up with a positive y coordinate
-int
-extractOrientation (const pcl::ModelCoefficients& coeffs,
-                    const ARPoint& p1, const ARPoint& p2,
-                    const ARPoint& p3, const ARPoint& p4,
-                    geometry_msgs::msg::Quaternion &retQ);
+int extractOrientation(const pcl::ModelCoefficients& coeffs, const ARPoint& p1,
+                       const ARPoint& p2, const ARPoint& p3, const ARPoint& p4,
+                       geometry_msgs::msg::Quaternion& retQ);

 // Like extractOrientation except return value is a btMatrix3x3
-int
-extractFrame (const pcl::ModelCoefficients& coeffs,
-              const ARPoint& p1, const ARPoint& p2,
-              const ARPoint& p3, const ARPoint& p4,
-              tf2::Matrix3x3 &retmat);
-
+int extractFrame(const pcl::ModelCoefficients& coeffs, const ARPoint& p1,
+                 const ARPoint& p2, const ARPoint& p3, const ARPoint& p4,
+                 tf2::Matrix3x3& retmat);

 // Return the centroid (mean) of a point cloud
-geometry_msgs::msg::Point centroid (const ARCloud& points);
-}  // namespace
+geometry_msgs::msg::Point centroid(const ARCloud& points);
+}  // namespace ar_track_alvar

-#endif  // include guard
+#endif  // include guard
diff --git a/ar_track_alvar/include/ar_track_alvar/filter/medianFilter.h b/ar_track_alvar/include/ar_track_alvar/filter/medianFilter.h
index 4e43ada..60b9b35 100644
--- a/ar_track_alvar/include/ar_track_alvar/filter/medianFilter.h
+++ b/ar_track_alvar/include/ar_track_alvar/filter/medianFilter.h
@@ -29,9 +29,9 @@
  */

 /**
- * \file
- *
- * N-dimensional median filter for marker poses
+ * \file
+ *
+ * N-dimensional median filter for marker poses
  *
  * \author Scott Niekum
  */
@@ -43,22 +43,20 @@

 namespace ar_track_alvar
 {
-
 class MedianFilter
 {
-  public:
+public:
   MedianFilter(int n);
-    void addPose(const alvar::Pose &new_pose);
-    void getMedian(alvar::Pose &ret_pose);
+  void addPose(const alvar::Pose& new_pose);
+  void getMedian(alvar::Pose& ret_pose);

-  private:
-    int median_n;
-    alvar::Pose *median_poses;
+private:
+  int median_n;
+  alvar::Pose* median_poses;
   int median_ind;
   bool median_init;
 };
+}  // namespace ar_track_alvar

-}  // namespace
-
-#endif  // include guard
+#endif  // include guard
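// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): typical use of the median
// filter declared above; the window size 5 is an arbitrary example value.
//
//   ar_track_alvar::MedianFilter filt(5);
//   filt.addPose(detected_pose);   // alvar::Pose from the marker detector
//   alvar::Pose smoothed;
//   filt.getMedian(smoothed);      // median over the last 5 poses
// ---------------------------------------------------------------------------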
diff --git a/ar_track_alvar/nodes/FindMarkerBundles.cpp b/ar_track_alvar/nodes/FindMarkerBundles.cpp
index b7d6cb5..ef56cec 100644
--- a/ar_track_alvar/nodes/FindMarkerBundles.cpp
+++ b/ar_track_alvar/nodes/FindMarkerBundles.cpp
@@ -493,7 +493,7 @@ class FindMarkerBundles : public rclcpp::Node
     }

     //Detect and track the markers
-    if (marker_detector.Detect(image, cam, true, false, max_new_marker_error,
+    if (marker_detector.Detect(*image, cam, true, false, max_new_marker_error,
                                max_track_error, CVSEQ, true))
     {
       //printf("\n--------------------------\n\n");
diff --git a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp
index 0cfeef6..cb1e4fb 100644
--- a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp
+++ b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp
@@ -169,13 +169,13 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node
   // Updates the bundlePoses of the multi_marker_bundles by detecting markers and using all markers in a bundle to infer the master tag's position
   void GetMultiMarkerPoses(cv::Mat *image) {

-    if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)){
+    if (marker_detector.Detect(*image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)){
       for(int i=0; i<n_bundles; i++)
         multi_marker_bundles[i]->Update(marker_detector.markers, cam, bundlePoses[i]);

-      if(marker_detector.DetectAdditional(image, cam, false) > 0){
+      if(marker_detector.DetectAdditional(*image, cam, false) > 0){
         for(int i=0; i<n_bundles; i++){
-          if ((multi_marker_bundles[i]->SetTrackMarkers(marker_detector, cam, bundlePoses[i], image) > 0))
+          if ((multi_marker_bundles[i]->SetTrackMarkers(marker_detector, cam, bundlePoses[i], *image) > 0))
             multi_marker_bundles[i]->Update(marker_detector.markers, cam, bundlePoses[i]);
         }
       }
diff --git a/ar_track_alvar/nodes/IndividualMarkers.cpp b/ar_track_alvar/nodes/IndividualMarkers.cpp
index 733e051..1c6f2a2 100644
--- a/ar_track_alvar/nodes/IndividualMarkers.cpp
+++ b/ar_track_alvar/nodes/IndividualMarkers.cpp
@@ -197,7 +197,6 @@ class IndividualMarkers : public rclcpp::Node

   void InfoCallback (const sensor_msgs::msg::CameraInfo::SharedPtr cam_info)
   {
-    RCLCPP_INFO(this->get_logger(),"this executed");
     if (!cam->getCamInfo_)
     {
       cam->SetCameraInfo(cam_info);
@@ -306,104 +305,116 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra
   }
 }

-  int PlaneFitPoseImprovement(int id, const ARCloud &corners_3D, ARCloud::Ptr selected_points, const ARCloud &cloud, Pose &p)
+  int PlaneFitPoseImprovement(int id, const ARCloud& corners_3D,
+                              ARCloud::Ptr selected_points, const ARCloud& cloud,
+                              Pose& p)
+{
+  ata::PlaneFitResult res = ata::fitPlane(selected_points);
+  gm::msg::PoseStamped pose;
+  pose.header.stamp = pcl_conversions::fromPCL(cloud.header).stamp;
+  pose.header.frame_id = cloud.header.frame_id;
+  pose.pose.position = ata::centroid(*res.inliers);
+
+  draw3dPoints(selected_points, cloud.header.frame_id, 1, id, 0.005);
+
+  // Get 2 points that point forward in marker x direction
+  int i1, i2;
+  if (isnan(corners_3D[0].x) || isnan(corners_3D[0].y) ||
+      isnan(corners_3D[0].z) || isnan(corners_3D[3].x) ||
+      isnan(corners_3D[3].y) || isnan(corners_3D[3].z))
   {
-    ata::PlaneFitResult res = ata::fitPlane(selected_points);
-    gm::msg::PoseStamped pose;
-    pose.header.stamp = pcl_conversions::fromPCL(cloud.header).stamp;
-    pose.header.frame_id = cloud.header.frame_id;
-    pose.pose.position = ata::centroid(*res.inliers);
-
-    draw3dPoints(selected_points, cloud.header.frame_id, 1, id, 0.005);
-
-    //Get 2 points that point forward in marker x direction
-    int i1,i2;
-    if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) ||
-       isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z))
+    if (isnan(corners_3D[1].x) || isnan(corners_3D[1].y) ||
+        isnan(corners_3D[1].z) || isnan(corners_3D[2].x) ||
+        isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
     {
-
-      if(isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z) ||
-         isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
-      {
-        return -1;
-      }
-      else
-      {
-        i1 = 1;
-        i2 = 2;
-      }
+      return -1;
     }
     else
     {
-      i1 = 0;
-      i2 = 3;
+      i1 = 1;
+      i2 = 2;
     }
+  }
+  else
+  {
+    i1 = 0;
+    i2 = 3;
+  }

-    //Get 2 points that point forward in marker y direction
-    int i3,i4;
-    if(isnan(corners_3D[0].x) || isnan(corners_3D[0].y) || isnan(corners_3D[0].z) ||
-       isnan(corners_3D[1].x) || isnan(corners_3D[1].y) || isnan(corners_3D[1].z))
+  // Get 2 points that point forward in marker y direction
+  int i3, i4;
+  if (isnan(corners_3D[0].x) || isnan(corners_3D[0].y) ||
+      isnan(corners_3D[0].z) || isnan(corners_3D[1].x) ||
+      isnan(corners_3D[1].y) || isnan(corners_3D[1].z))
+  {
+    if (isnan(corners_3D[3].x) || isnan(corners_3D[3].y) ||
+        isnan(corners_3D[3].z) || isnan(corners_3D[2].x) ||
+        isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
     {
-      if(isnan(corners_3D[3].x) || isnan(corners_3D[3].y) || isnan(corners_3D[3].z) ||
-         isnan(corners_3D[2].x) || isnan(corners_3D[2].y) || isnan(corners_3D[2].z))
-      {
-        return -1;
-      }
-      else
-      {
-        i3 = 2;
-        i4 = 3;
-      }
+      return -1;
     }
     else
     {
-      i3 = 1;
-      i4 = 0;
+      i3 = 2;
+      i4 = 3;
     }
+  }
+  else
+  {
+    i3 = 1;
+    i4 = 0;
+  }

-    ARCloud::Ptr orient_points(new ARCloud());
-    orient_points->points.push_back(corners_3D[i1]);
-    draw3dPoints(orient_points, cloud.header.frame_id, 3, id+1000, 0.008);
-
-    orient_points->clear();
-    orient_points->points.push_back(corners_3D[i2]);
-    draw3dPoints(orient_points, cloud.header.frame_id, 2, id+2000, 0.008);
-
-    int succ;
-    succ = ata::extractOrientation(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], pose.pose.orientation);
-    if(succ < 0) return -1;
-
-    tf2::Matrix3x3 mat;
-    succ = ata::extractFrame(res.coeffs, corners_3D[i1], corners_3D[i2], corners_3D[i3], corners_3D[i4], mat);
-    if(succ < 0) return -1;
-
-    drawArrow(pose.pose.position, mat, cloud.header.frame_id, 1, id);
-
-    p.translation[0] = pose.pose.position.x * 100.0;
-    p.translation[1] = pose.pose.position.y * 100.0;
-    p.translation[2] = pose.pose.position.z * 100.0;
-    p.quaternion[1] = pose.pose.orientation.x;
-    p.quaternion[2] = pose.pose.orientation.y;
-    p.quaternion[3] = pose.pose.orientation.z;
-    p.quaternion[0] = pose.pose.orientation.w;
+  ARCloud::Ptr orient_points(new ARCloud());
+  orient_points->points.push_back(corners_3D[i1]);
+  draw3dPoints(orient_points, cloud.header.frame_id, 3, id + 1000, 0.008);
+
+  orient_points->clear();
+  orient_points->points.push_back(corners_3D[i2]);
+  draw3dPoints(orient_points, cloud.header.frame_id, 2, id + 2000, 0.008);
+
+  int succ;
+  succ = ata::extractOrientation(res.coeffs, corners_3D[i1], corners_3D[i2],
+                                 corners_3D[i3], corners_3D[i4],
+                                 pose.pose.orientation);
+  if (succ < 0)
+    return -1;
+
+  tf2::Matrix3x3 mat;
+  succ = ata::extractFrame(res.coeffs, corners_3D[i1], corners_3D[i2],
+                           corners_3D[i3], corners_3D[i4], mat);
+  if (succ < 0)
+    return -1;
+
+  drawArrow(pose.pose.position, mat, cloud.header.frame_id, 1, id);
+
+  p.translation[0] = pose.pose.position.x * 100.0;
+  p.translation[1] = pose.pose.position.y * 100.0;
+  p.translation[2] = pose.pose.position.z * 100.0;
+  p.quaternion[1] = pose.pose.orientation.x;
+  p.quaternion[2] = pose.pose.orientation.y;
+  p.quaternion[3] = pose.pose.orientation.z;
+  p.quaternion[0] = pose.pose.orientation.w;

-    return 0;
-  }
+  return 0;
+}

   void GetMarkerPoses(cv::Mat * image, ARCloud &cloud) {

     //Detect and track the markers
-    if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true))
+    if (marker_detector.Detect(*image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true))
     {
-      std::cout << "--------------------------" << std::endl;
+      RCLCPP_INFO(this->get_logger(), "-----------------------------------");
       for (size_t i=0; i<marker_detector.markers->size(); i++)
       {
        vector<cv::Point, Eigen::aligned_allocator<cv::Point> > pixels;
        Marker *m = &((*marker_detector.markers)[i]);
        int id = m->GetId();
-       std::cout<< "******* ID: " << id << std::endl;
+
+       RCLCPP_INFO(this->get_logger(), "******* ID: %d",id);
+
        int resol = m->GetRes();
        int ori = m->ros_orientation;
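// ---------------------------------------------------------------------------
// Illustrative summary (not part of the patch): PlaneFitPoseImprovement above
// (1) fits a plane to the depth points under the tag, (2) picks a pair of
// valid corners for the x and y directions, skipping NaN corners, and
// (3) rebuilds the orientation from the plane normal. Condensed:
//
//   ata::PlaneFitResult res = ata::fitPlane(selected_points);
//   pose.pose.position = ata::centroid(*res.inliers);
//   geometry_msgs::msg::Quaternion q;
//   if (ata::extractOrientation(res.coeffs, corners_3D[i1], corners_3D[i2],
//                               corners_3D[i3], corners_3D[i4], q) < 0)
//     return -1;   // orientation could not be recovered
// ---------------------------------------------------------------------------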
diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp
index 3fbdc30..c470b4f 100644
--- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp
+++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp
@@ -208,7 +208,7 @@ class IndividualMarkersNoKinect : public rclcpp::Node
   void getCapCallback (const sensor_msgs::msg::Image::SharedPtr image_msg)
   {
     //If we've already gotten the cam info, then go ahead
-    if(cam->getCamInfo_){
+    if(cam->getCamInfo_){
       try
       {
         geometry_msgs::msg::TransformStamped CamToOutput;
@@ -230,23 +230,31 @@ class IndividualMarkersNoKinect : public rclcpp::Node
         // do this conversion here -jbinney
         cv::Mat ipl_image = cv_ptr_->image;

-        marker_detector.Detect(&ipl_image, cam, true, true, max_new_marker_error, max_track_error, CVSEQ, true);
-        arPoseMarkers_.markers.clear ();
-        RCLCPP_WARN(this->get_logger(),"Markers detected %d",marker_detector.markers->size());
+        marker_detector.Detect(cv_ptr_->image, cam, true, false,
+                               max_new_marker_error, max_track_error, CVSEQ,
+                               true);
+        arPoseMarkers_.markers.clear();
+
+
+        //marker_detector.Detect(ipl_image, cam, true, true, max_new_marker_error, max_track_error, CVSEQ, true);
+        arPoseMarkers_.markers.clear ();
         for (size_t i=0; i<marker_detector.markers->size(); i++)
         {
-          //Get the pose relative to the camera
-          int id = (*(marker_detector.markers))[i].GetId();
-          Pose p = (*(marker_detector.markers))[i].pose;
-          double px = p.translation[0]/100.0;
-          double py = p.translation[1]/100.0;
-          double pz = p.translation[2]/100.0;
-          double qx = p.quaternion[1];
-          double qy = p.quaternion[2];
-          double qz = p.quaternion[3];
-          double qw = p.quaternion[0];
+          // Get the pose relative to the camera
+          int id = (*(marker_detector.markers))[i].GetId();
+          Pose p = (*(marker_detector.markers))[i].pose;
+          double px = p.translation[0] / 100.0;
+          double py = p.translation[1] / 100.0;
+          double pz = p.translation[2] / 100.0;
+
+          cv::Mat quat =cv::Mat(4, 1, CV_64F);
+          p.GetQuaternion(quat);
+          double qx = quat.at<double>(1,0); //p.quaternion[1]; #leaving these for the record, this was a bug in the original repo
+          double qy = quat.at<double>(2,0); //p.quaternion[2];
+          double qz = quat.at<double>(3,0); //p.quaternion[3];
+          double qw = quat.at<double>(0,0); //p.quaternion[0];

           tf2::Quaternion rotation (qx,qy,qz,qw);
           tf2::Vector3 origin (px,py,pz);
diff --git a/ar_track_alvar/nodes/TrainMarkerBundle.cpp b/ar_track_alvar/nodes/TrainMarkerBundle.cpp
index c2cde86..3e1c3fb 100644
--- a/ar_track_alvar/nodes/TrainMarkerBundle.cpp
+++ b/ar_track_alvar/nodes/TrainMarkerBundle.cpp
@@ -245,14 +245,14 @@ class TrainMarkerBundle : public rclcpp::Node
         double error = -1;
         if (!optimize_done)
         {
-            if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)) {
+            if (marker_detector.Detect(*image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)) {
                 error = multi_marker_init->Update(marker_detector.markers, cam, pose);
             }
         }
         else
         {
-            if (marker_detector.Detect(image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)) {
+            if (marker_detector.Detect(*image, cam, true, false, max_new_marker_error, max_track_error, CVSEQ, true)) {
                 error = multi_marker_bundle->Update(marker_detector.markers, cam, pose);
-                if ((multi_marker_bundle->SetTrackMarkers(marker_detector, cam, pose, image) > 0) && (marker_detector.DetectAdditional(image, cam, false) > 0))
+                if ((multi_marker_bundle->SetTrackMarkers(marker_detector, cam, pose, *image) > 0) && (marker_detector.DetectAdditional(*image, cam, false) > 0))
                 {
                     error = multi_marker_bundle->Update(marker_detector.markers, cam, pose);
                 }
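// ---------------------------------------------------------------------------
// Illustrative note (not part of the patch text): the IndividualMarkersNoKinect
// change above stops reading p.quaternion[] directly and instead calls
// Pose::GetQuaternion(), which fills a 4x1 CV_64F matrix in (w, x, y, z)
// order. Condensed:
//
//   cv::Mat quat(4, 1, CV_64F);
//   p.GetQuaternion(quat);
//   double qw = quat.at<double>(0, 0);   // w comes first
//   double qx = quat.at<double>(1, 0);
//   double qy = quat.at<double>(2, 0);
//   double qz = quat.at<double>(3, 0);
// ---------------------------------------------------------------------------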
Reality" << std::endl; - std::cerr << "Copyright 2007-2012 VTT Technical Research Centre of Finland" << std::endl; - std::cerr << "Licensed under the GNU Lesser General Public License" << std::endl; - std::cerr << "Built on " << ALVAR_DATE << " for " << ALVAR_SYSTEM << std::endl; - std::cerr << std::endl; + std::cerr << "ALVAR " << ALVAR_VERSION + << " - A Library for Virtual and Augmented Reality" << std::endl; + std::cerr << "Copyright 2007-2012 VTT Technical Research Centre of Finland" + << std::endl; + std::cerr << "Licensed under the GNU Lesser General Public License" + << std::endl; + std::cerr << "Built on " << ALVAR_DATE << " for " << ALVAR_SYSTEM + << std::endl; + std::cerr << std::endl; } -struct AlvarLoader { - AlvarLoader() { - alvarInfo(); - } +struct AlvarLoader +{ + AlvarLoader() + { + alvarInfo(); + } } alvarBasicLoader; -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Bitset.cpp b/ar_track_alvar/src/Bitset.cpp index be6c55c..da5a615 100644 --- a/ar_track_alvar/src/Bitset.cpp +++ b/ar_track_alvar/src/Bitset.cpp @@ -25,306 +25,408 @@ using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; -int Bitset::Length() { - return bits.size(); +int Bitset::Length() +{ + return bits.size(); +} +ostream& Bitset::Output(ostream& os) const +{ + deque::const_iterator iter = bits.begin(); + while (iter != bits.end()) + { + if (*iter) + os << "1"; + else + os << "0"; + iter++; + } + return os; +} +void Bitset::clear() +{ + bits.clear(); } -ostream &Bitset::Output(ostream &os) const { - deque::const_iterator iter=bits.begin(); - while (iter != bits.end()) { - if (*iter) os<<"1"; - else os<<"0"; - iter++; - } - return os; +void Bitset::push_back(const bool bit) +{ + bits.push_back(bit); } -void Bitset::clear() { bits.clear(); } -void Bitset::push_back(const bool bit) { bits.push_back(bit); } -void Bitset::push_back(const unsigned char b, int bit_count /*=8*/) { - push_back((const unsigned long)b, bit_count); +void Bitset::push_back(const unsigned char b, int bit_count /*=8*/) +{ + push_back((const unsigned long)b, bit_count); } -void Bitset::push_back(const unsigned short s, int bit_count /*=16*/) { - push_back((const unsigned long)s, bit_count); +void Bitset::push_back(const unsigned short s, int bit_count /*=16*/) +{ + push_back((const unsigned long)s, bit_count); } -void Bitset::push_back(const unsigned long l, int bit_count /*=32*/) { - unsigned long mask; - if ((bit_count > 32) || (bit_count == 0)) bit_count=32; - mask = 0x01<<(bit_count-1); - for (int i=0; i>=1; - } +void Bitset::push_back(const unsigned long l, int bit_count /*=32*/) +{ + unsigned long mask; + if ((bit_count > 32) || (bit_count == 0)) + bit_count = 32; + mask = 0x01 << (bit_count - 1); + for (int i = 0; i < bit_count; i++) + { + if (l & mask) + push_back(true); + else + push_back(false); + mask >>= 1; + } } -void Bitset::push_back_meaningful(const unsigned long l) { - int bit_count = 1; - for (int i=0; i<32; i++) { - unsigned long mask = 0x01< 0x08) bitpos >>= 4; - for (size_t i=0; i < bits.size(); i++) { - if (bits[i]) b = b | bitpos; - else b = b & (0x0f ^ bitpos); - bitpos >>= 1; - if (bitpos == 0x00) { - bitpos = 0x08; - ss << b; - } - } - return ss.str(); + stringstream ss; + ss.unsetf(std::ios_base::dec); + ss.setf(std::ios_base::hex); + unsigned long b = 0; + int bitpos = (0x08 << (bits.size() % 4)); + if (bitpos > 0x08) + bitpos >>= 4; + for (size_t i = 0; i < bits.size(); i++) + { + if (bits[i]) + b = b | bitpos; + else + b = b & (0x0f ^ bitpos); + bitpos >>= 
+    if (bitpos == 0x00)
+    {
+      bitpos = 0x08;
+      ss << b;
+    }
+  }
+  return ss.str();
 }

 unsigned long Bitset::ulong()
 {
-    //if(bits.size() > (sizeof(unsigned long)*8))
-    //    throw "code too big for unsigned long\n";
-    stringstream ss;
-    ss << setbase(16) << hex();
-    unsigned long v;
-    ss >> v;
-    return v;
+  // if(bits.size() > (sizeof(unsigned long)*8))
+  //   throw "code too big for unsigned long\n";
+  stringstream ss;
+  ss << setbase(16) << hex();
+  unsigned long v;
+  ss >> v;
+  return v;
 }

 unsigned char Bitset::uchar()
 {
-    //if(bits.size() > (sizeof(unsigned char)*8))
-    //    throw "code too big for unsigned char\n";
-    stringstream ss;
-    ss << setbase(16) << hex();
-    unsigned long v; //ttehop: Note, that this cannot be char
-    ss >> v;
-    return (unsigned char)v;
+  // if(bits.size() > (sizeof(unsigned char)*8))
+  //   throw "code too big for unsigned char\n";
+  stringstream ss;
+  ss << setbase(16) << hex();
+  unsigned long v;  // ttehop: Note, that this cannot be char
+  ss >> v;
+  return (unsigned char)v;
 }

-void BitsetExt::hamming_enc_block(unsigned long block_len, deque<bool>::iterator &iter) {
-    if (verbose) cout<<"hamming_enc_block: ";
-    unsigned long next_parity=1;
-    for (unsigned long i=1; i<=block_len; i++) {
-        // Add a parity bit if this is a place for such
-        if (i == next_parity) {
-            if (verbose) cout<<"p";
-            next_parity <<= 1;
-            iter = bits.insert(iter, false);
-        }
-        // Otherwise if this bit is 1 change all related parity bits
-        else {
-            if (iter == bits.end()) {
-                block_len = i-1;
-                break;
-            }
-            if (verbose) cout<<(*iter?1:0);
-            if (*iter) {
-                unsigned long parity = next_parity>>1;
-                while (parity) {
-                    if (i & parity) {
-                        deque<bool>::iterator parity_iter=(iter - (i - parity));
-                        *parity_iter = !*parity_iter;
-                    }
-                    parity >>= 1;
-                }
-            }
-        }
-        iter++;
-    }
-    // Update the last parity bit if we have one
-    // Note, that the last parity bit can safely be removed from the code if it is not desired...
-    if (block_len == (next_parity >> 1)) {
-        // If the last bit is parity bit - make parity over the previous data
-        for (unsigned long ii=1; ii<block_len; ii++) {
-            if (*(iter-ii-1)) *(iter-1) = !*(iter-1);
-        }
-    }
-    if (verbose) {
-        cout<<" -> ";
-        for (unsigned long ii=block_len; ii>=1; ii--) {
-            cout<<(*(iter-ii)?1:0);
-        }
-        cout<<" block_len: "<<block_len<<endl;
-    }
-}
+void BitsetExt::hamming_enc_block(unsigned long block_len,
+                                  deque<bool>::iterator& iter)
+{
+  if (verbose)
+    cout << "hamming_enc_block: ";
+  unsigned long next_parity = 1;
+  for (unsigned long i = 1; i <= block_len; i++)
+  {
+    // Add a parity bit if this is a place for such
+    if (i == next_parity)
+    {
+      if (verbose)
+        cout << "p";
+      next_parity <<= 1;
+      iter = bits.insert(iter, false);
+    }
+    // Otherwise if this bit is 1 change all related parity bits
+    else
+    {
+      if (iter == bits.end())
+      {
+        block_len = i - 1;
+        break;
+      }
+      if (verbose)
+        cout << (*iter ? 1 : 0);
+      if (*iter)
+      {
+        unsigned long parity = next_parity >> 1;
+        while (parity)
+        {
+          if (i & parity)
+          {
+            deque<bool>::iterator parity_iter = (iter - (i - parity));
+            *parity_iter = !*parity_iter;
+          }
+          parity >>= 1;
+        }
+      }
+    }
+    iter++;
+  }
+  // Update the last parity bit if we have one
+  // Note, that the last parity bit can safely be removed from the code if it is
+  // not desired...
+  if (block_len == (next_parity >> 1))
+  {
+    // If the last bit is parity bit - make parity over the previous data
+    for (unsigned long ii = 1; ii < block_len; ii++)
+    {
+      if (*(iter - ii - 1))
+        *(iter - 1) = !*(iter - 1);
+    }
+  }
+  if (verbose)
+  {
+    cout << " -> ";
+    for (unsigned long ii = block_len; ii >= 1; ii--)
+    {
+      cout << (*(iter - ii) ? 1 : 0);
+    }
+    cout << " block_len: " << block_len << endl;
+  }
 }
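// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): driving the Hamming
// encode/decode blocks in this file. The block length (8) and the byte value
// are arbitrary example inputs.
//
//   alvar::BitsetExt bs;
//   bs.push_back((unsigned char)0x5A);   // 8 data bits
//   bs.hamming_enc(8);                   // insert parity bits in place
//   // ... bits are read back from a marker image ...
//   int corrected = bs.hamming_dec(8);   // number of corrected errors,
//                                        // or -1 if unrecoverable
// ---------------------------------------------------------------------------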
-int BitsetExt::hamming_dec_block(unsigned long block_len, deque<bool>::iterator &iter) {
-    if (verbose) cout<<"hamming_dec_block: ";
-    bool potentially_double_error = false;
-    unsigned long total_parity=0;
-    unsigned long parity=0;
-    unsigned long next_parity=1;
-    for (unsigned long i=1; i<=block_len; i++) {
-        if (iter == bits.end()) {
-            // ttehop:
-            // At 3.12.2009 I changed the following line because
-            // it crashed with 7x7 markers. However, I didn't fully
-            // understand the reason why it should be so. Let's
-            // give more thought to it when we have more time.
-            // old version: block_len = i-1;
-            block_len = i;
-            break;
-        }
-        if (*iter) {
-            parity = parity ^ i;
-            total_parity = total_parity ^ 1;
-        }
-        if (i == next_parity) {
-            if (verbose) cout<<"("<<*iter<<")";
-            next_parity <<= 1;
-            iter = bits.erase(iter);
-        } else {
-            if (verbose) cout<<*iter;
-            iter++;
-        }
-    }
-    if (block_len < 3) {
-        if (verbose) cout<<" too short"<<endl;
-        return 0;
-    }
-    if (block_len == (next_parity >> 1)) {
-        parity = parity & ~(next_parity >> 1); // The last parity bit shouldn't be included in the other parity tests (TODO: Better solution)
-        if (total_parity == 0) {
-            potentially_double_error = true;
-        }
-    }
-    int steps=0;
-    if (verbose) cout<<" parity: "<<parity;
-    if (parity) {
-        if (potentially_double_error) {
-            if (verbose) cout<<" double error"<<endl;
-            return -1;
-        }
-        next_parity = 1;
-        for (unsigned long i=1; i<=block_len; i++) {
-            if (i == next_parity) {
-                next_parity <<= 1;
-                if (i == parity) {
-                    if (verbose) cout<<" parity bit error"<<endl;
-                    return 1; // Only parity bit was erroneous
-                }
-            } else if (i >= parity) {
-                steps++;
-            }
-        }
-        iter[-steps] = !iter[-steps];
-        if (verbose) cout<<" corrected"<<endl;
-        return 1;
-    }
-    if (verbose) cout<<" ok"<<endl;
-    return 0;
+int BitsetExt::hamming_dec_block(unsigned long block_len,
+                                 deque<bool>::iterator& iter)
+{
+  if (verbose)
+    cout << "hamming_dec_block: ";
+  bool potentially_double_error = false;
+  unsigned long total_parity = 0;
+  unsigned long parity = 0;
+  unsigned long next_parity = 1;
+  for (unsigned long i = 1; i <= block_len; i++)
+  {
+    if (iter == bits.end())
+    {
+      // ttehop:
+      // At 3.12.2009 I changed the following line because
+      // it crashed with 7x7 markers. However, I didn't fully
+      // understand the reason why it should be so. Let's
+      // give more thought to it when we have more time.
+ // old version: block_len = i-1; + block_len = i; + break; + } + if (*iter) + { + parity = parity ^ i; + total_parity = total_parity ^ 1; + } + if (i == next_parity) + { + if (verbose) + cout << "(" << *iter << ")"; + next_parity <<= 1; + iter = bits.erase(iter); + } + else + { + if (verbose) + cout << *iter; + iter++; + } + } + if (block_len < 3) + { + if (verbose) + cout << " too short" << endl; + return 0; + } + if (block_len == (next_parity >> 1)) + { + parity = parity & ~(next_parity >> 1); // The last parity bit shouldn't be + // included in the other parity + // tests (TODO: Better solution) + if (total_parity == 0) + { + potentially_double_error = true; + } + } + int steps = 0; + if (verbose) + cout << " parity: " << parity; + if (parity) + { + if (potentially_double_error) + { + if (verbose) + cout << " double error" << endl; + return -1; + } + next_parity = 1; + for (unsigned long i = 1; i <= block_len; i++) + { + if (i == next_parity) + { + next_parity <<= 1; + if (i == parity) + { + if (verbose) + cout << " parity bit error" << endl; + return 1; // Only parity bit was erroneous + } + } + else if (i >= parity) + { + steps++; + } + } + iter[-steps] = !iter[-steps]; + if (verbose) + cout << " corrected" << endl; + return 1; + } + if (verbose) + cout << " ok" << endl; + return 0; } -BitsetExt::BitsetExt() { - SetVerbose(false); +BitsetExt::BitsetExt() +{ + SetVerbose(false); } -BitsetExt::BitsetExt(bool _verbose) { - SetVerbose(_verbose); +BitsetExt::BitsetExt(bool _verbose) +{ + SetVerbose(_verbose); } /* void BitsetExt::str_8to6bit() { - // TODO: Assume that the bitset contains 8-bit ASCII chars and change them into 6-bit chars + // TODO: Assume that the bitset contains 8-bit ASCII chars and change them +into 6-bit chars } void BitsetExt::str_6to8bit() { - // TODO: Assume that the bitset contains 6-bit chars and change them into 8-bit ASCII chars + // TODO: Assume that the bitset contains 6-bit chars and change them into +8-bit ASCII chars } void BitsetExt::crc_enc(int crc_len) { - // TODO: Add crc_len-bit checksum for the bitset + // TODO: Add crc_len-bit checksum for the bitset } bool BitsetExt::crc_dec(int crc_len) { - // TODO: Check the CRC - return false; + // TODO: Check the CRC + return false; } */ -void BitsetExt::SetVerbose(bool _verbose) { - verbose = _verbose; +void BitsetExt::SetVerbose(bool _verbose) +{ + verbose = _verbose; } -int BitsetExt::count_hamming_enc_len(int block_len, int dec_len) { - int parity_len=0; - int dec_len_count = dec_len; - while (dec_len_count > 0) { - unsigned long next_parity = 1; - for (unsigned long i=1; i<=(unsigned long)block_len; i++) { - if (i == next_parity) { - parity_len++; - next_parity <<= 1; - } else { - dec_len_count--; - } - if (dec_len_count == 0) break; - } - } - return dec_len + parity_len; +int BitsetExt::count_hamming_enc_len(int block_len, int dec_len) +{ + int parity_len = 0; + int dec_len_count = dec_len; + while (dec_len_count > 0) + { + unsigned long next_parity = 1; + for (unsigned long i = 1; i <= (unsigned long)block_len; i++) + { + if (i == next_parity) + { + parity_len++; + next_parity <<= 1; + } + else + { + dec_len_count--; + } + if (dec_len_count == 0) + break; + } + } + return dec_len + parity_len; } -int BitsetExt::count_hamming_dec_len(int block_len, int enc_len) { - int parity_len=0; - int enc_len_count = enc_len; - while (enc_len_count > 0) { - unsigned long next_parity = 1; - unsigned long i; - for (i=1; i<=(unsigned long)block_len; i++) { - if (i == next_parity) { - parity_len++; - next_parity <<= 1; - 
}
-        enc_len_count--;
-        if (enc_len_count == 0) break;
-    }
-    }
-    return enc_len - parity_len;
+int BitsetExt::count_hamming_dec_len(int block_len, int enc_len)
+{
+  int parity_len = 0;
+  int enc_len_count = enc_len;
+  while (enc_len_count > 0)
+  {
+    unsigned long next_parity = 1;
+    unsigned long i;
+    for (i = 1; i <= (unsigned long)block_len; i++)
+    {
+      if (i == next_parity)
+      {
+        parity_len++;
+        next_parity <<= 1;
+      }
+      enc_len_count--;
+      if (enc_len_count == 0)
+        break;
+    }
+  }
+  return enc_len - parity_len;
 }

-void BitsetExt::hamming_enc(int block_len) {
-    deque<bool>::iterator iter=bits.begin();
-    while (iter != bits.end()) {
-        hamming_enc_block(block_len, iter);
-    }
+void BitsetExt::hamming_enc(int block_len)
+{
+  deque<bool>::iterator iter = bits.begin();
+  while (iter != bits.end())
+  {
+    hamming_enc_block(block_len, iter);
+  }
 }

 // Returns number of corrected errors (or -1 if there was an unrecoverable error)
-int BitsetExt::hamming_dec(int block_len) {
-    int error_count=0;
-    deque<bool>::iterator iter=bits.begin();
-    while (iter != bits.end()) {
-        int error=hamming_dec_block(block_len, iter);
-        if ((error == -1) || (error_count == -1)) error_count=-1;
-        else error_count += error;
-    }
-    return error_count;
+int BitsetExt::hamming_dec(int block_len)
+{
+  int error_count = 0;
+  deque<bool>::iterator iter = bits.begin();
+  while (iter != bits.end())
+  {
+    int error = hamming_dec_block(block_len, iter);
+    if ((error == -1) || (error_count == -1))
+      error_count = -1;
+    else
+      error_count += error;
+  }
+  return error_count;
 }

-}  // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Camera.cpp b/ar_track_alvar/src/Camera.cpp
index 723ebf9..e6426b6 100644
--- a/ar_track_alvar/src/Camera.cpp
+++ b/ar_track_alvar/src/Camera.cpp
@@ -25,930 +25,872 @@
 #include "ar_track_alvar/Camera.h"
 #include "ar_track_alvar/FileFormatUtils.h"
 #include
-#include "rclcpp/rclcpp.hpp"
-#include
+#include
 #include
-using std::placeholders::_1;

 using namespace std;

-namespace alvar {
+namespace alvar
+{
 using namespace std;

-void ProjPoints::Reset() {
-    object_points.clear();
-    image_points.clear();
-    point_counts.clear();
+void ProjPoints::Reset()
+{
+  object_points.clear();
+  image_points.clear();
+  point_counts.clear();
 }

 // TODO: Does it matter what the etalon_square_size is???
-bool ProjPoints::AddPointsUsingChessboard(cv::Mat *image, double etalon_square_size, int etalon_rows, int etalon_columns, bool visualize) { - if (image->rows == 0) return false; - //IplImage *gray = cvCreateImage(cvSize(image->width, image->height), IPL_DEPTH_8U, 1); - - - cv::Mat gray = cv::Mat(cv::Size(image->rows, image->cols), CV_MAKETYPE(CV_8U,1)); - - - // CvPoint2D32f *corners = new CvPoint2D32f[etalon_rows*etalon_columns]; - - cv::Size patternsize(etalon_rows,etalon_columns); - vector centers; - - if (image->channels() == 1) - gray = image->clone(); - else - cv::cvtColor(*image, gray, cv::COLOR_BGR2GRAY); - width = image->rows; - height = image->cols; - - int point_count = 0; - //int pattern_was_found = cvFindChessboardCorners(gray, cvSize(etalon_rows, etalon_columns), corners, &point_count); - - bool pattern_was_found = findChessboardCorners(gray,patternsize,centers, cv::CALIB_CB_ADAPTIVE_THRESH); - point_count = centers.size(); - - - if (!pattern_was_found) point_count=0; - - - if (point_count > 0) { - - cv::TermCriteria criteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 30, 0.001); - cv::cornerSubPix(gray,centers,cv::Size(5,5), cv::Size(-1,-1),criteria); - - // Displaying the detected corner points on the checker board - if(visualize) - { - cv::drawChessboardCorners(*image, cv::Size(etalon_rows, etalon_columns), centers, pattern_was_found); - } - - for (int i=0; i 0) return true; - return false; -} - -bool ProjPoints::AddPointsUsingMarkers(vector &marker_corners, vector &marker_corners_img, cv::Mat* image) -{ - width = image->rows; - height = image->cols; - if ((marker_corners.size() == marker_corners_img.size()) && - (marker_corners.size() == 4)) - { - for (size_t p=0; p corners; + if (image.channels() == 1) + image.copyTo(gray); + else + cv::cvtColor(image, gray, cv::COLOR_RGB2GRAY); + + width = image.cols; + height = image.rows; + + std::size_t point_count = 0; + + int pattern_was_found = cv::findChessboardCorners( + gray, cv::Size(etalon_rows, etalon_columns), corners); + point_count = !pattern_was_found ? 
0 : corners.size(); + if (point_count > 0) + { + cv::cornerSubPix( + gray, corners, cv::Size(5, 5), cv::Size(-1, -1), + cv::TermCriteria(cv::TermCriteria::MAX_ITER | cv::TermCriteria::EPS, 10, + 0.01f)); + for (int i = 0; i < point_count; i++) + { + cv::Point3d po; + cv::Point2d pi; + po.x = etalon_square_size * (i % etalon_rows); + po.y = etalon_square_size * (i / etalon_rows); + po.z = 0; + pi.x = corners[i].x; + pi.y = corners[i].y; + object_points.push_back(po); + image_points.push_back(pi); + } + point_counts.push_back(point_count); + } + if (visualize) + { + cv::drawChessboardCorners(image, cv::Size(etalon_rows, etalon_columns), + corners, false); + } + corners.clear(); + gray.release(); + if (point_count > 0) + return true; + return false; +} + +bool ProjPoints::AddPointsUsingMarkers(vector& marker_corners, + vector& marker_corners_img, + cv::Mat& image) +{ + width = image.cols; + height = image.rows; + if ((marker_corners.size() == marker_corners_img.size()) && + (marker_corners.size() == 4)) + { + for (size_t p = 0; p < marker_corners.size(); p++) + { + cv::Point3d po; + cv::Point2d pi; + po.x = marker_corners[p].x; + po.y = marker_corners[p].y; + po.z = 0; + pi.x = marker_corners_img[p].x; + pi.y = marker_corners_img[p].y; + object_points.push_back(po); + image_points.push_back(pi); + } + point_counts.push_back(marker_corners.size()); + } + + return true; +} + +Camera::Camera() +{ + calib_K = cv::Mat(3, 3, CV_64F, calib_K_data); + calib_D = cv::Mat(4, 1, CV_64F, calib_D_data); + memset(calib_K_data, 0, sizeof(double) * 3 * 3); + memset(calib_D_data, 0, sizeof(double) * 4); + calib_K_data[0][0] = 550; // Just some focal length by default + calib_K_data[1][1] = 550; // Just some focal length by default + calib_K_data[0][2] = 320; + calib_K_data[1][2] = 240; + calib_K_data[2][2] = 1; + calib_x_res = 640; + calib_y_res = 480; + x_res = 640; + y_res = 480; + getCamInfo_ = false; +} + +// Camera::Camera(ros::NodeHandle& n, std::string cam_info_topic) : n_(n) // { -// RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); -// calib_K = cvMat(3, 3, CV_64F, calib_K_data); -// calib_D = cvMat(4, 1, CV_64F, calib_D_data); -// memset(calib_K_data,0,sizeof(double)*3*3); -// memset(calib_D_data,0,sizeof(double)*4); -// calib_K_data[0][0] = 550; // Just some focal length by default -// calib_K_data[1][1] = 550; // Just some focal length by default -// calib_K_data[0][2] = 320; -// calib_K_data[1][2] = 240; -// calib_K_data[2][2] = 1; -// calib_x_res = 640; -// calib_y_res = 480; -// x_res = 640; -// y_res = 480; -// cameraInfoTopic_ = cam_info_topic; - -// RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); -// sub_ = this->create_subscription(cameraInfoTopic_, 1,std::bind(&Camera::camInfoCallback, this, _1)); -// getCamInfo_ = false; +// calib_K = cv::Mat(3, 3, CV_64F, calib_K_data); +// calib_D = cv::Mat(4, 1, CV_64F, calib_D_data); +// memset(calib_K_data, 0, sizeof(double) * 3 * 3); +// memset(calib_D_data, 0, sizeof(double) * 4); +// calib_K_data[0][0] = 550; // Just some focal length by default +// calib_K_data[1][1] = 550; // Just some focal length by default +// calib_K_data[0][2] = 320; +// calib_K_data[1][2] = 240; +// calib_K_data[2][2] = 1; +// calib_x_res = 640; +// calib_y_res = 480; +// x_res = 640; +// y_res = 480; +// cameraInfoTopic_ = cam_info_topic; +// ROS_INFO("Subscribing to info topic"); +// sub_ = n_.subscribe(cameraInfoTopic_, 1, &Camera::camInfoCallback, this); +// getCamInfo_ = false; // } +// +// Camera::Camera(int w, int h) { +// calib_K = cvMat(3, 
3, CV_64F, calib_K_data); +// calib_D = cvMat(4, 1, CV_64F, calib_D_data); +// memset(calib_K_data,0,sizeof(double)*3*3); +// memset(calib_D_data,0,sizeof(double)*4); +// calib_K_data[0][0] = w/2; +// calib_K_data[1][1] = w/2; +// calib_K_data[0][2] = w/2; +// calib_K_data[1][2] = h/2; +// calib_K_data[2][2] = 1; +// calib_x_res = w; +// calib_y_res = h; +// x_res = w; +// y_res = h; +//} void Camera::SetSimpleCalib(int _x_res, int _y_res, double f_fac) { - memset(calib_K_data,0,sizeof(double)*3*3); - memset(calib_D_data,0,sizeof(double)*4); - calib_K_data[0][0] = _x_res*f_fac; // Just some focal length by default - calib_K_data[1][1] = _x_res*f_fac; // Just some focal length by default - calib_K_data[0][2] = _x_res/2; - calib_K_data[1][2] = _y_res/2; - calib_K_data[2][2] = 1; - calib_x_res = _x_res; - calib_y_res = _y_res; -} - -bool Camera::LoadCalibXML(const char *calibfile) { - tinyxml2::XMLDocument document; - if (!document.LoadFile(calibfile)) return false; - tinyxml2::XMLElement *xml_root = document.RootElement(); - - return - xml_root->QueryIntAttribute("width", &calib_x_res) == EXIT_SUCCESS && - xml_root->QueryIntAttribute("height", &calib_y_res) == EXIT_SUCCESS && - FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("intrinsic_matrix"), &calib_K) && - FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("distortion"), &calib_D); -} - -bool Camera::LoadCalibOpenCV(const char *calibfile) { - cvSetErrMode(CV_ErrModeSilent); - // CvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_READ); - - cv::FileStorage cvfs(calibfile, cv::FileStorage::READ); // CV_STORAGE_READ); - - /* read data from file */ - - cvSetErrMode(CV_ErrModeLeaf); - if(cvfs.isOpened()){ - //CvFileNode* root_node = cvGetRootFileNode(fs); - - cv::FileNode root_node = cvfs.getFirstTopLevelNode(); - // K Intrinsic - - //cv::FileNode intrinsic_mat_node = root_node[std::string("intrinsic_matrix")]; - - - //CvFileNode* intrinsic_mat_node = cvGetFileNodeByName(fs, root_node, "intrinsic_matrix"); - - cv::Mat intrinsic_mat; - cvfs["intrinsic_matrix"] >> intrinsic_mat; - - CvMat intrinsic_mat2 = cvMat(intrinsic_mat); - //= intrinsic_mat_node.read(); //(cvRead(fs, intrinsic_mat_node)); - // reinterpret_cast - cvmSet(&calib_K, 0, 0, cvmGet(&intrinsic_mat2, 0, 0)); - cvmSet(&calib_K, 0, 1, cvmGet(&intrinsic_mat2, 0, 1)); - cvmSet(&calib_K, 0, 2, cvmGet(&intrinsic_mat2, 0, 2)); - cvmSet(&calib_K, 1, 0, cvmGet(&intrinsic_mat2, 1, 0)); - cvmSet(&calib_K, 1, 1, cvmGet(&intrinsic_mat2, 1, 1)); - cvmSet(&calib_K, 1, 2, cvmGet(&intrinsic_mat2, 1, 2)); - cvmSet(&calib_K, 2, 0, cvmGet(&intrinsic_mat2, 2, 0)); - cvmSet(&calib_K, 2, 1, cvmGet(&intrinsic_mat2, 2, 1)); - cvmSet(&calib_K, 2, 2, cvmGet(&intrinsic_mat2, 2, 2)); - - - //CvFileNode* dist_mat_node = cvGetFileNodeByName(fs, root_node, "distortion"); - - // D Distortion - cv::Mat dist_mat; - cvfs["distortion"] >> dist_mat; - CvMat dist_mat2 = cvMat(dist_mat); - - - //reinterpret_cast(cvRead(fs, dist_mat_node)); - cvmSet(&calib_D, 0, 0, cvmGet(&dist_mat2, 0, 0)); - cvmSet(&calib_D, 1, 0, cvmGet(&dist_mat2, 1, 0)); - cvmSet(&calib_D, 2, 0, cvmGet(&dist_mat2, 2, 0)); - cvmSet(&calib_D, 3, 0, cvmGet(&dist_mat2, 3, 0)); - - // Resolution - // cv::FileNode width_node = root_node["width"]; - // cv::FileNode height_node = root_node["height"]; - // calib_x_res = width_node->data.i; - // calib_y_res = height_node->data.i; - - cvfs["width"] >> calib_x_res; - cvfs["height"] >> calib_y_res; - //cvReleaseFileStorage(&fs); - return true; - } - // reset error status - 
cvSetErrStatus(CV_StsOk); - return false; + memset(calib_K_data, 0, sizeof(double) * 3 * 3); + memset(calib_D_data, 0, sizeof(double) * 4); + calib_K_data[0][0] = _x_res * f_fac; // Just some focal length by default + calib_K_data[1][1] = _x_res * f_fac; // Just some focal length by default + calib_K_data[0][2] = _x_res / 2; + calib_K_data[1][2] = _y_res / 2; + calib_K_data[2][2] = 1; + calib_x_res = _x_res; + calib_y_res = _y_res; +} + +bool Camera::LoadCalibXML(const char* calibfile) +{ + TiXmlDocument document; + if (!document.LoadFile(calibfile)) + return false; + TiXmlElement* xml_root = document.RootElement(); + + return xml_root->QueryIntAttribute("width", &calib_x_res) == TIXML_SUCCESS && + xml_root->QueryIntAttribute("height", &calib_y_res) == TIXML_SUCCESS && + FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("intrinsic" + "_matrix"), + calib_K) && + FileFormatUtils::parseXMLMatrix(xml_root->FirstChildElement("distortio" + "n"), + calib_D); +} + +bool Camera::LoadCalibOpenCV(const char* calibfile) +{ + cv::FileStorage fs; + bool success = fs.open(calibfile, cv::FileStorage::READ); + + if (success) + { + // K Intrinsic + cv::FileNode intrinsic_mat_node = fs["intrinsic_matrix"]; + cv::Mat intrinsic_mat; + cv::read(intrinsic_mat_node, intrinsic_mat); + calib_K.at(0, 0) = intrinsic_mat.at(0, 0); + calib_K.at(0, 1) = intrinsic_mat.at(0, 1); + calib_K.at(0, 2) = intrinsic_mat.at(0, 2); + calib_K.at(1, 0) = intrinsic_mat.at(1, 0); + calib_K.at(1, 1) = intrinsic_mat.at(1, 1); + calib_K.at(1, 2) = intrinsic_mat.at(1, 2); + calib_K.at(2, 0) = intrinsic_mat.at(2, 0); + calib_K.at(2, 1) = intrinsic_mat.at(2, 1); + calib_K.at(2, 2) = intrinsic_mat.at(2, 2); + + // D Distortion + cv::FileNode dist_mat_node = fs["distortion"]; + cv::Mat dist_mat; + cv::read(dist_mat_node, dist_mat); + calib_D.at(0, 0) = dist_mat.at(0, 0); + calib_D.at(1, 0) = dist_mat.at(1, 0); + calib_D.at(2, 0) = dist_mat.at(2, 0); + calib_D.at(3, 0) = dist_mat.at(3, 0); + + // Resolution + cv::FileNode width_node = fs["width"]; + cv::FileNode height_node = fs["height"]; + cv::read(width_node, calib_x_res, 0); + cv::read(height_node, calib_y_res, 0); + fs.release(); + return true; + } + return false; } void Camera::SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) { - //cam_info_ = *cam_info; - - calib_x_res = cam_info->width; - calib_y_res = cam_info->height; - x_res = calib_x_res; - y_res = calib_y_res; - - cvmSet(&calib_K, 0, 0, cam_info_.k[0]); - cvmSet(&calib_K, 0, 1, cam_info_.k[1]); - cvmSet(&calib_K, 0, 2, cam_info_.k[2]); - cvmSet(&calib_K, 1, 0, cam_info_.k[3]); - cvmSet(&calib_K, 1, 1, cam_info_.k[4]); - cvmSet(&calib_K, 1, 2, cam_info_.k[5]); - cvmSet(&calib_K, 2, 0, cam_info_.k[6]); - cvmSet(&calib_K, 2, 1, cam_info_.k[7]); - cvmSet(&calib_K, 2, 2, cam_info_.k[8]); - - if (cam_info_.d.size() >= 4) { - cvmSet(&calib_D, 0, 0, cam_info_.d[0]); - cvmSet(&calib_D, 1, 0, cam_info_.d[1]); - cvmSet(&calib_D, 2, 0, cam_info_.d[2]); - cvmSet(&calib_D, 3, 0, cam_info_.d[3]); - } else { - cvmSet(&calib_D, 0, 0, 0); - cvmSet(&calib_D, 1, 0, 0); - cvmSet(&calib_D, 2, 0, 0); - cvmSet(&calib_D, 3, 0, 0); + //cam_info_ = cam_info; + + calib_x_res = cam_info_.width; + calib_y_res = cam_info_.height; + x_res = calib_x_res; + y_res = calib_y_res; + + calib_K.at(0, 0) = cam_info_.k[0]; + calib_K.at(0, 1) = cam_info_.k[1]; + calib_K.at(0, 2) = cam_info_.k[2]; + calib_K.at(1, 0) = cam_info_.k[3]; + calib_K.at(1, 1) = cam_info_.k[4]; + calib_K.at(1, 2) = cam_info_.k[5]; + calib_K.at(2, 0) = cam_info_.k[6]; 
+ calib_K.at(2, 1) = cam_info_.k[7]; + calib_K.at(2, 2) = cam_info_.k[8]; + + if (cam_info_.d.size() >= 4) + { + calib_D.at(0, 0) = cam_info_.d[0]; + calib_D.at(1, 0) = cam_info_.d[1]; + calib_D.at(2, 0) = cam_info_.d[2]; + calib_D.at(3, 0) = cam_info_.d[3]; + } + else + { + calib_D.at(0, 0) = 0; + calib_D.at(1, 0) = 0; + calib_D.at(2, 0) = 0; + calib_D.at(3, 0) = 0; + } +} + +// void Camera::camInfoCallback(const sensor_msgs::CameraInfoConstPtr& cam_info) +// { +// if (!getCamInfo_) +// { +// SetCameraInfo(*cam_info); +// getCamInfo_ = true; +// sub_.shutdown(); +// } +// } + +bool Camera::SetCalib(const char* calibfile, int _x_res, int _y_res, + FILE_FORMAT format) +{ + x_res = _x_res; + y_res = _y_res; + if (!calibfile) + return false; + + bool success = false; + switch (format) + { + case FILE_FORMAT_XML: + success = LoadCalibXML(calibfile); + break; + case FILE_FORMAT_OPENCV: + case FILE_FORMAT_DEFAULT: + success = LoadCalibOpenCV(calibfile); + break; + default: + // TODO: throw exception? + break; + }; + + if (success) + { + // Scale matrix in case of different resolution calibration. + // The OpenCV documentation says: + // - If an image from camera is up-sampled/down-sampled by some factor, all + // intrinsic camera parameters + // (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) + // by the same factor. + // - The distortion coefficients remain the same regardless of the captured + // image resolution. + if ((calib_x_res != x_res) || (calib_y_res != y_res)) + { + calib_K_data[0][0] *= (double(x_res) / double(calib_x_res)); + calib_K_data[0][2] *= (double(x_res) / double(calib_x_res)); + calib_K_data[1][1] *= (double(y_res) / double(calib_y_res)); + calib_K_data[1][2] *= (double(y_res) / double(calib_y_res)); } + } + return success; } -bool Camera::SetCalib(const char *calibfile, int _x_res, int _y_res, FILE_FORMAT format) { - x_res = _x_res; - y_res = _y_res; - if(!calibfile) return false; - - bool success = false; - switch (format) { - case FILE_FORMAT_XML: - success = LoadCalibXML(calibfile); - break; - case FILE_FORMAT_OPENCV: - case FILE_FORMAT_DEFAULT: - success = LoadCalibOpenCV(calibfile); - break; - default: - // TODO: throw exception? - break; - }; - - if (success) { - // Scale matrix in case of different resolution calibration. - // The OpenCV documentation says: - // - If an image from camera is up-sampled/down-sampled by some factor, all intrinsic camera parameters - // (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) by the same factor. - // - The distortion coefficients remain the same regardless of the captured image resolution. 
- if ((calib_x_res != x_res) || (calib_y_res != y_res)) { - calib_K_data[0][0] *= (double(x_res)/double(calib_x_res)); - calib_K_data[0][2] *= (double(x_res)/double(calib_x_res)); - calib_K_data[1][1] *= (double(y_res)/double(calib_y_res)); - calib_K_data[1][2] *= (double(y_res)/double(calib_y_res)); - } - } - return success; -} - -bool Camera::SaveCalibXML(const char *calibfile) { - tinyxml2::XMLDocument document; - tinyxml2::XMLDeclaration* declaration=document.NewDeclaration(); - document.InsertFirstChild(declaration); - tinyxml2::XMLElement * temp; - temp->SetName("Camera"); - document.LinkEndChild(temp); - tinyxml2::XMLElement *xml_root = document.RootElement(); - xml_root->SetAttribute("width", calib_x_res); - xml_root->SetAttribute("height", calib_y_res); - xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("intrinsic_matrix", &calib_K)); - xml_root->LinkEndChild(FileFormatUtils::createXMLMatrix("distortion", &calib_D)); - return document.SaveFile(calibfile); -} - -bool Camera::SaveCalibOpenCV(const char *calibfile) { - cvSetErrMode(CV_ErrModeSilent); - cv::FileStorage cvfs(calibfile, cv::FileStorage::WRITE); - //cvFileStorage* fs = cvOpenFileStorage(calibfile, 0, CV_STORAGE_WRITE); - cvSetErrMode(CV_ErrModeLeaf); - if(cvfs.isOpened()){ - cv::write(cvfs, "intrinsic_matrix", cv::cvarrToMat(&calib_K)); - cv::write(cvfs, "distortion", cv::cvarrToMat(&calib_D)); - cv::write(cvfs, "width", calib_x_res); - cv::write(cvfs, "width", calib_x_res); - // cv::write(cvfs, "fov_x", data.fov_x); - // cv::write(cvfs, "fov_y", data.fov_y); - - - - // cvWrite(fs, "intrinsic_matrix", &calib_K, cvAttrList(0,0)); - // cvWrite(fs, "distortion", &calib_D, cvAttrList(0,0)); - // cvWriteInt(fs, "width", calib_x_res); - // cvWriteInt(fs, "height", calib_y_res); - //cvReleaseFileStorage(&fs); - return true; - } - // reset error status - cvSetErrStatus(CV_StsOk); - return false; -} - -bool Camera::SaveCalib(const char *calibfile, FILE_FORMAT format) { - if(!calibfile) - return false; - - switch (format) { - case FILE_FORMAT_XML: - return SaveCalibXML(calibfile); - case FILE_FORMAT_OPENCV: - case FILE_FORMAT_DEFAULT: - return SaveCalibOpenCV(calibfile); - default: - return false; - }; -} - -void Camera::Calibrate(ProjPoints &pp) -{ - CvMat *object_points = cvCreateMat((int)pp.object_points.size(), 1, CV_32FC3); - CvMat *image_points = cvCreateMat((int)pp.image_points.size(), 1, CV_32FC2); - const CvMat point_counts = cvMat((int)pp.point_counts.size(), 1, CV_32SC1, &pp.point_counts[0]); - for (size_t i=0; idata.fl[i*3+0] = (float)pp.object_points[i].x; - object_points->data.fl[i*3+1] = (float)pp.object_points[i].y; - object_points->data.fl[i*3+2] = (float)pp.object_points[i].z; - image_points->data.fl[i*2+0] = (float)pp.image_points[i].x; - image_points->data.fl[i*2+1] = (float)pp.image_points[i].y; - } - - cv::Mat cameraMatrix,distCoeffs,R,T; - - cv::calibrateCamera(cv::cvarrToMat(object_points), cv::cvarrToMat(image_points), cv::Size(pp.width, pp.height), cameraMatrix, distCoeffs, R, T); - - - calib_K = cvMat(cameraMatrix); - calib_D = cvMat(distCoeffs); - - calib_x_res = pp.width; - calib_y_res = pp.height; - - cvReleaseMat(&object_points); - cvReleaseMat(&image_points); -} - -void Camera::SetRes(int _x_res, int _y_res) { - x_res = _x_res; - y_res = _y_res; - // Scale matrix in case of different resolution calibration. 
-  // The OpenCV documentation says:
-  // - If an image from camera is up-sampled/down-sampled by some factor, all intrinsic camera parameters
-  //   (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively) by the same factor.
-  // - The distortion coefficients remain the same regardless of the captured image resolution.
-  if ((calib_x_res != x_res) || (calib_y_res != y_res)) {
-    calib_K_data[0][0] *= (double(x_res)/double(calib_x_res));
-    calib_K_data[0][2] *= (double(x_res)/double(calib_x_res));
-    calib_K_data[1][1] *= (double(y_res)/double(calib_y_res));
-    calib_K_data[1][2] *= (double(y_res)/double(calib_y_res));
-  }
+bool Camera::SaveCalibXML(const char* calibfile)
+{
+  TiXmlDocument document;
+  document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no"));
+  document.LinkEndChild(new TiXmlElement("camera"));
+  TiXmlElement* xml_root = document.RootElement();
+  xml_root->SetAttribute("width", calib_x_res);
+  xml_root->SetAttribute("height", calib_y_res);
+  xml_root->LinkEndChild(
+      FileFormatUtils::createXMLMatrix("intrinsic_matrix", calib_K));
+  xml_root->LinkEndChild(
+      FileFormatUtils::createXMLMatrix("distortion", calib_D));
+  return document.SaveFile(calibfile);
+}
+
+bool Camera::SaveCalibOpenCV(const char* calibfile)
+{
+  cv::FileStorage fs;
+  bool success = fs.open(calibfile, cv::FileStorage::WRITE);
+  if (success)
+  {
+    cv::write(fs, "intrinsic_matrix", calib_K);
+    cv::write(fs, "distortion", calib_D);
+    cv::write(fs, "width", calib_x_res);
+    cv::write(fs, "height", calib_y_res);
+    fs.release();
+    return true;
+  }
+  return false;
+}
+
+bool Camera::SaveCalib(const char* calibfile, FILE_FORMAT format)
+{
+  if (!calibfile)
+    return false;
+
+  switch (format)
+  {
+    case FILE_FORMAT_XML:
+      return SaveCalibXML(calibfile);
+    case FILE_FORMAT_OPENCV:
+    case FILE_FORMAT_DEFAULT:
+      return SaveCalibOpenCV(calibfile);
+    default:
+      return false;
+  };
+}
+
+void Camera::Calibrate(ProjPoints& pp)
+{
+  std::vector<std::vector<cv::Point3d>> object_points;
+  std::vector<std::vector<cv::Point2d>> image_points;
+  object_points.emplace_back();
+  for (const auto& point : pp.object_points)
+  {
+    object_points[0].push_back(point);
+  }
+  image_points.emplace_back();
+  for (const auto& point : pp.image_points)
+  {
+    image_points[0].push_back(point);
+  }
+  cv::calibrateCamera(object_points, image_points,
+                      cv::Size(pp.width, pp.height), calib_K, calib_D,
+                      cv::Mat(), cv::Mat(), cv::CALIB_USE_INTRINSIC_GUESS);
+
+  calib_x_res = pp.width;
+  calib_y_res = pp.height;
+}
+
+void Camera::SetRes(int _x_res, int _y_res)
+{
+  x_res = _x_res;
+  y_res = _y_res;
+  // Scale matrix in case of different resolution calibration.
+  // The OpenCV documentation says:
+  // - If an image from camera is up-sampled/down-sampled by some factor, all
+  // intrinsic camera parameters
+  //   (fx, fy, cx and cy) should be scaled (multiplied/divided, respectively)
+  //   by the same factor.
+  // - The distortion coefficients remain the same regardless of the captured
+  // image resolution.
+  if ((calib_x_res != x_res) || (calib_y_res != y_res))
+  {
+    calib_K_data[0][0] *= (double(x_res) / double(calib_x_res));
+    calib_K_data[0][2] *= (double(x_res) / double(calib_x_res));
+    calib_K_data[1][1] *= (double(y_res) / double(calib_y_res));
+    calib_K_data[1][2] *= (double(y_res) / double(calib_y_res));
+  }
 }
 
 // TODO: Better approach for this...
 // Note, the proj_matrix[8] is now negated. This is due to the fact
 // that with OpenCV and OpenGL projection matrices both y and z
-// should be mirrored.
All other components are -void Camera::GetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height, const float far_clip /*= 1000.0f*/, const float near_clip /*= 0.1f*/) { - proj_matrix[0] = 2.0f * calib_K_data[0][0] / float(width); - proj_matrix[1] = 0; - proj_matrix[2] = 0; - proj_matrix[3] = 0; - proj_matrix[4] = 2.0f * calib_K_data[0][1] / float(width); // skew - proj_matrix[5] = 2.0f * calib_K_data[1][1] / float(height); - proj_matrix[6] = 0; - proj_matrix[7] = 0; - //proj_matrix[8] = (2.0f * calib_K_data[0][2] / float(width)) - 1.0f; - proj_matrix[8] = -(2.0f * calib_K_data[0][2] / float(width)) + 1.0f; - proj_matrix[9] = (2.0f * calib_K_data[1][2] / float(height)) - 1.0f; - proj_matrix[10] = -(far_clip + near_clip)/(far_clip - near_clip); - proj_matrix[11] = -1.0f; - proj_matrix[12] = 0; - proj_matrix[13] = 0; - proj_matrix[14] = -2.0f * far_clip * near_clip / (far_clip - near_clip); - proj_matrix[15] = 0; -} - -void Camera::SetOpenglProjectionMatrix(double proj_matrix[16], const int width, const int height) { - x_res = width; - y_res = height; - calib_x_res = width; - calib_y_res = height; - calib_K_data[0][0] = proj_matrix[0] * float(width) / 2.0f; - calib_K_data[0][1] = proj_matrix[4] * float(width) / 2.0f; - calib_K_data[1][1] = proj_matrix[5] * float(height) / 2.0f; - //calib_K_data[0][2] = (proj_matrix[8] + 1.0f) * float(width) / 2.0f; - calib_K_data[0][2] = (-proj_matrix[8] + 1.0f) * float(width) / 2.0f; // Is this ok? - calib_K_data[1][2] = (proj_matrix[9] + 1.0f) * float(height) / 2.0f; - calib_K_data[2][2] = 1; -} - -void Camera::Undistort(PointDouble &point) +// should be mirrored. All other components are +void Camera::GetOpenglProjectionMatrix(double proj_matrix[16], const int width, + const int height, + const float far_clip /*= 1000.0f*/, + const float near_clip /*= 0.1f*/) { -/* - // focal length - double ifx = 1./cvmGet(&calib_K, 0, 0); - double ify = 1./cvmGet(&calib_K, 1, 1); - - // principal point - double cx = cvmGet(&calib_K, 0, 2); - double cy = cvmGet(&calib_K, 1, 2); - - // distortion coeffs - double* k = calib_D.data.db; - - // compensate distortion iteratively - double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y; - for(int j = 0; j < 5; j++){ - double r2 = x*x + y*y; - double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2); - double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x); - double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y; - x = (x0 - delta_x)*icdist; - y = (y0 - delta_y)*icdist; - } - // apply compensation - point.x = x/ifx + cx; - point.y = y/ify + cy; -*/ + proj_matrix[0] = 2.0f * calib_K_data[0][0] / float(width); + proj_matrix[1] = 0; + proj_matrix[2] = 0; + proj_matrix[3] = 0; + proj_matrix[4] = 2.0f * calib_K_data[0][1] / float(width); // skew + proj_matrix[5] = 2.0f * calib_K_data[1][1] / float(height); + proj_matrix[6] = 0; + proj_matrix[7] = 0; + // proj_matrix[8] = (2.0f * calib_K_data[0][2] / float(width)) - 1.0f; + proj_matrix[8] = -(2.0f * calib_K_data[0][2] / float(width)) + 1.0f; + proj_matrix[9] = (2.0f * calib_K_data[1][2] / float(height)) - 1.0f; + proj_matrix[10] = -(far_clip + near_clip) / (far_clip - near_clip); + proj_matrix[11] = -1.0f; + proj_matrix[12] = 0; + proj_matrix[13] = 0; + proj_matrix[14] = -2.0f * far_clip * near_clip / (far_clip - near_clip); + proj_matrix[15] = 0; +} + +void Camera::SetOpenglProjectionMatrix(double proj_matrix[16], const int width, + const int height) +{ + x_res = width; + y_res = height; + calib_x_res = width; + calib_y_res = height; + calib_K_data[0][0] = 
proj_matrix[0] * float(width) / 2.0f;
+  calib_K_data[0][1] = proj_matrix[4] * float(width) / 2.0f;
+  calib_K_data[1][1] = proj_matrix[5] * float(height) / 2.0f;
+  // calib_K_data[0][2] = (proj_matrix[8] + 1.0f) * float(width) / 2.0f;
+  calib_K_data[0][2] =
+      (-proj_matrix[8] + 1.0f) * float(width) / 2.0f;  // Is this ok?
+  calib_K_data[1][2] = (proj_matrix[9] + 1.0f) * float(height) / 2.0f;
+  calib_K_data[2][2] = 1;
+}
+
+void Camera::Undistort(PointDouble& point)
+{
+  /*
+    // focal length
+    double ifx = 1./cvmGet(&calib_K, 0, 0);
+    double ify = 1./cvmGet(&calib_K, 1, 1);
+
+    // principal point
+    double cx = cvmGet(&calib_K, 0, 2);
+    double cy = cvmGet(&calib_K, 1, 2);
+
+    // distortion coeffs
+    double* k = calib_D.data.db;
+
+    // compensate distortion iteratively
+    double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
+    for(int j = 0; j < 5; j++){
+      double r2 = x*x + y*y;
+      double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
+      double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
+      double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
+      x = (x0 - delta_x)*icdist;
+      y = (y0 - delta_y)*icdist;
+    }
+    // apply compensation
+    point.x = x/ifx + cx;
+    point.y = y/ify + cy;
+  */
 }
 
-void Camera::Undistort(vector<PointDouble >& points)
+void Camera::Undistort(vector<PointDouble>& points)
 {
-/*
-  // focal length
-  double ifx = 1./cvmGet(&calib_K, 0, 0);
-  double ify = 1./cvmGet(&calib_K, 1, 1);
-
-  // principal point
-  double cx = cvmGet(&calib_K, 0, 2);
-  double cy = cvmGet(&calib_K, 1, 2);
-
-  // distortion coeffs
-  double* k = calib_D.data.db;
-
-  for(unsigned int i = 0; i < points.size(); i++)
-  {
-    // compensate distortion iteratively
-    double x = (points[i].x - cx)*ifx, y = (points[i].y - cy)*ify, x0 = x, y0 = y;
-    for(int j = 0; j < 5; j++){
-      double r2 = x*x + y*y;
-      double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
-      double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
-      double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
-      x = (x0 - delta_x)*icdist;
-      y = (y0 - delta_y)*icdist;
-    }
-    // apply compensation
-    points[i].x = x/ifx + cx;
-    points[i].y = y/ify + cy;
-  }
-*/
+  /*
+    // focal length
+    double ifx = 1./cvmGet(&calib_K, 0, 0);
+    double ify = 1./cvmGet(&calib_K, 1, 1);
+
+    // principal point
+    double cx = cvmGet(&calib_K, 0, 2);
+    double cy = cvmGet(&calib_K, 1, 2);
+
+    // distortion coeffs
+    double* k = calib_D.data.db;
+
+    for(unsigned int i = 0; i < points.size(); i++)
+    {
+      // compensate distortion iteratively
+      double x = (points[i].x - cx)*ifx, y = (points[i].y - cy)*ify, x0 = x, y0 = y;
+      for(int j = 0; j < 5; j++){
+        double r2 = x*x + y*y;
+        double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
+        double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
+        double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
+        x = (x0 - delta_x)*icdist;
+        y = (y0 - delta_y)*icdist;
+      }
+      // apply compensation
+      points[i].x = x/ifx + cx;
+      points[i].y = y/ify + cy;
+    }
+  */
 }
 
 void Camera::Undistort(cv::Point2f& point)
 {
-/*
-  // focal length
-  double ifx = 1./cvmGet(&calib_K, 0, 0);
-  double ify = 1./cvmGet(&calib_K, 1, 1);
-
-  // principal point
-  double cx = cvmGet(&calib_K, 0, 2);
-  double cy = cvmGet(&calib_K, 1, 2);
-
-  // distortion coeffs
-  double* k = calib_D.data.db;
-
-
-  // compensate distortion iteratively
-  double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
-  for(int j = 0; j < 5; j++){
-    double r2 = x*x + y*y;
-    double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
-    double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
-    double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
-    x = (x0 - delta_x)*icdist;
-    y = (y0 - delta_y)*icdist;
-  }
-  // apply compensation
-  point.x = float(x/ifx + cx);
-  point.y = float(y/ify + cy);
-*/
+  /*
+    // focal length
+    double ifx = 1./cvmGet(&calib_K, 0, 0);
+    double ify = 1./cvmGet(&calib_K, 1, 1);
+
+    // principal point
+    double cx = cvmGet(&calib_K, 0, 2);
+    double cy = cvmGet(&calib_K, 1, 2);
+
+    // distortion coeffs
+    double* k = calib_D.data.db;
+
+
+    // compensate distortion iteratively
+    double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
+    for(int j = 0; j < 5; j++){
+      double r2 = x*x + y*y;
+      double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
+      double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
+      double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
+      x = (x0 - delta_x)*icdist;
+      y = (y0 - delta_y)*icdist;
+    }
+    // apply compensation
+    point.x = float(x/ifx + cx);
+    point.y = float(y/ify + cy);
+  */
 }
 
 /*
-  template<class PointType>
-  void Undistort(PointType& point) {
-    // focal length
-    double ifx = 1./cvmGet(&calib_K, 0, 0);
-    double ify = 1./cvmGet(&calib_K, 1, 1);
-
-    // principal point
-    double cx = cvmGet(&calib_K, 0, 2);
-    double cy = cvmGet(&calib_K, 1, 2);
-
-    // distortion coeffs
-    double* k = calib_D.data.db;
-
-    // compensate distortion iteratively
-    double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
-    for(int j = 0; j < 5; j++){
-      double r2 = x*x + y*y;
-      double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
-      double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
-      double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
-      x = (x0 - delta_x)*icdist;
-      y = (y0 - delta_y)*icdist;
-    }
-    // apply compensation
-    point.x = x/ifx + cx;
-    point.y = y/ify + cy;
-  }
+  template <class PointType>
+  void Undistort(PointType& point) {
+    // focal length
+    double ifx = 1./cvmGet(&calib_K, 0, 0);
+    double ify = 1./cvmGet(&calib_K, 1, 1);
+
+    // principal point
+    double cx = cvmGet(&calib_K, 0, 2);
+    double cy = cvmGet(&calib_K, 1, 2);
+
+    // distortion coeffs
+    double* k = calib_D.data.db;
+
+    // compensate distortion iteratively
+    double x = (point.x - cx)*ifx, y = (point.y - cy)*ify, x0 = x, y0 = y;
+    for(int j = 0; j < 5; j++){
+      double r2 = x*x + y*y;
+      double icdist = 1./(1 + k[0]*r2 + k[1]*r2*r2);
+      double delta_x = 2*k[2]*x*y + k[3]*(r2 + 2*x*x);
+      double delta_y = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y;
+      x = (x0 - delta_x)*icdist;
+      y = (y0 - delta_y)*icdist;
+    }
+    // apply compensation
+    point.x = x/ifx + cx;
+    point.y = y/ify + cy;
+  }
 */
 
-void Camera::Distort(vector<PointDouble >& points)
+void Camera::Distort(vector<PointDouble>& points)
 {
-/*
-  double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
-  double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
-  double _fx = 1./fx, _fy = 1./fy;
-  double* k = calib_D.data.db;
-
-  double k1 = k[0], k2 = k[1];
-  double p1 = k[2], p2 = k[3];
-
-  for(unsigned int i = 0; i < points.size(); i++)
-  {
-    // Distort
-    double y = (points[i].y - v0)*_fy;
-    double y2 = y*y;
+    double _2p1y = 2*p1*y;
+    double _3p1y2 = 3*p1*y2;
+    double p2y2 = p2*y2;
+
+    double x = (points[i].x - u0)*_fx;
+    double x2 = x*x;
+    double r2 = x2 + y2;
+    double d = 1 + (k1 + k2*r2)*r2;
+
+    points[i].x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
+    points[i].y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
+  }
+  */
 }
 
-void Camera::Distort(PointDouble & point)
+void Camera::Distort(PointDouble& point)
 {
-/*
-  double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
-  double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
-  double _fx = 1./fx, _fy = 1./fy;
-  double* k = calib_D.data.db;
-
-  double k1 = k[0], k2 = k[1];
-  double p1 = k[2], p2 = k[3];
-
-  // Distort
-  double y = (point.y - v0)*_fy;
-  double y2 = y*y;
-  double _2p1y = 2*p1*y;
-  double _3p1y2 = 3*p1*y2;
-  double p2y2 = p2*y2;
-
-  double x = (point.x - u0)*_fx;
-  double x2 = x*x;
-  double r2 = x2 + y2;
-  double d = 1 + (k1 + k2*r2)*r2;
-
-  point.x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
-  point.y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
-*/
+  /*
+  double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
+  double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
+  double _fx = 1./fx, _fy = 1./fy;
+  double* k = calib_D.data.db;
+
+  double k1 = k[0], k2 = k[1];
+  double p1 = k[2], p2 = k[3];
+
+  // Distort
+  double y = (point.y - v0)*_fy;
+  double y2 = y*y;
+  double _2p1y = 2*p1*y;
+  double _3p1y2 = 3*p1*y2;
+  double p2y2 = p2*y2;
+
+  double x = (point.x - u0)*_fx;
+  double x2 = x*x;
+  double r2 = x2 + y2;
+  double d = 1 + (k1 + k2*r2)*r2;
+
+  point.x = fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0;
+  point.y = fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0;
+  */
 }
 
-void Camera::Distort(cv::Point3f & point)
+void Camera::Distort(cv::Point2f& point)
 {
-/*
-  double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
-  double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
-  double _fx = 1./fx, _fy = 1./fy;
-  double* k = calib_D.data.db;
-
-  double k1 = k[0], k2 = k[1];
-  double p1 = k[2], p2 = k[3];
-
-  // Distort
-  double y = (point.y - v0)*_fy;
-  double y2 = y*y;
-  double _2p1y = 2*p1*y;
-  double _3p1y2 = 3*p1*y2;
-  double p2y2 = p2*y2;
-
-  double x = (point.x - u0)*_fx;
-  double x2 = x*x;
-  double r2 = x2 + y2;
-  double d = 1 + (k1 + k2*r2)*r2;
-
-  point.x = float(fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0);
-  point.y = float(fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0);
-*/
+  /*
+  double u0 = cvmGet(&calib_K, 0, 2), v0 = cvmGet(&calib_K, 1, 2); // cx, cy
+  double fx = cvmGet(&calib_K, 0, 0), fy = cvmGet(&calib_K, 1, 1);
+  double _fx = 1./fx, _fy = 1./fy;
+  double* k = calib_D.data.db;
+
+  double k1 = k[0], k2 = k[1];
+  double p1 = k[2], p2 = k[3];
+
+  // Distort
+  double y = (point.y - v0)*_fy;
+  double y2 = y*y;
+  double _2p1y = 2*p1*y;
+  double _3p1y2 = 3*p1*y2;
+  double p2y2 = p2*y2;
+
+  double x = (point.x - u0)*_fx;
+  double x2 = x*x;
+  double r2 = x2 + y2;
+  double d = 1 + (k1 + k2*r2)*r2;
+
+  point.x = float(fx*(x*(d + _2p1y) + p2y2 + (3*p2)*x2) + u0);
+  point.y = float(fy*(y*(d + (2*p2)*x) + _3p1y2 + p1*x2) + v0);
+  */
+}
+
+void Camera::CalcExteriorOrientation(const vector<cv::Point3d>& pw,
+                                     const vector<PointDouble>& pi,
+                                     cv::Mat& rodriques, cv::Mat& tra) const
+{
+  vector<cv::Point2d> pi2;
+  for (const auto& point : pi)
+  {
+    pi2.push_back(point);
+  }
+
+  tra.setTo(cv::Scalar::all(0));
+  rodriques.setTo(cv::Scalar::all(0));
+  cv::solvePnP(pw, pi2, calib_K, cv::Mat(), rodriques, tra, false,
+               cv::SOLVEPNP_ITERATIVE);
+}
+
+void Camera::CalcExteriorOrientation(const vector<PointDouble>& pw,
+                                     const vector<PointDouble>& pi,
+                                     cv::Mat& rodriques, cv::Mat& tra) const
+{
+  vector<cv::Point3d> pw3;
+  for (const auto& point : pw)
+  {
+    pw3.emplace_back(cv::Point3d(point.x, point.y, 0));
+  }
+
+  CalcExteriorOrientation(pw3, pi, rodriques, tra);
+}
+
+void Camera::CalcExteriorOrientation(const vector<PointDouble>& pw,
+                                     const vector<PointDouble>& pi,
+                                     Pose* pose) const
+{
+  cv::Mat ext_rodriques_mat = cv::Mat(3, 1, CV_64F);
+  cv::Mat ext_translate_mat = cv::Mat(3, 1, CV_64F);
+  CalcExteriorOrientation(pw, pi, ext_rodriques_mat, ext_translate_mat);
+  pose->SetRodriques(ext_rodriques_mat);
+  pose->SetTranslation(ext_translate_mat);
+}
+
+void Camera::ProjectPoints(vector<cv::Point3d>& pw, Pose* pose,
+                           vector<cv::Point2d>& pi) const
+{
+  cv::Mat ext_rodriques_mat = cv::Mat(3, 1, CV_64F);
+  cv::Mat ext_translate_mat = cv::Mat(3, 1, CV_64F);
+  pose->GetRodriques(ext_rodriques_mat);
+  pose->GetTranslation(ext_translate_mat);
+  cv::Mat object_points = cv::Mat((int)pw.size(), 1, CV_32FC3);
+  cv::Mat image_points = cv::Mat((int)pi.size(), 1, CV_32FC2);
+  for (size_t i = 0; i < pw.size(); i++)
+  {
+    object_points.at<float>(i * 3 + 0) = (float)pw[i].x;
+    object_points.at<float>(i * 3 + 1) = (float)pw[i].y;
+    object_points.at<float>(i * 3 + 2) = (float)pw[i].z;
+  }
+  cv::projectPoints(object_points, ext_rodriques_mat, ext_translate_mat,
+                    calib_K, calib_D, image_points);
+  for (size_t i = 0; i < pw.size(); i++)
+  {
+    pi[i].x = image_points.at<float>(i * 2 + 0);
+    pi[i].y = image_points.at<float>(i * 2 + 1);
+  }
+  object_points.release();
+  image_points.release();
+}
+
+void Camera::ProjectPoints(const cv::Mat& object_points,
+                           const cv::Mat& rotation_vector,
+                           const cv::Mat& translation_vector,
+                           cv::Mat& image_points) const
+{
+  // Project points
+  cv::projectPoints(object_points, rotation_vector, translation_vector, calib_K,
+                    calib_D, image_points);
+}
+
+void Camera::ProjectPoints(const cv::Mat& object_points, const Pose* pose,
+                           cv::Mat& image_points) const
+{
+  cv::Mat ext_rodriques_mat = cv::Mat(3, 1, CV_64F);
+  cv::Mat ext_translate_mat = cv::Mat(3, 1, CV_64F);
+  pose->GetRodriques(ext_rodriques_mat);
+  pose->GetTranslation(ext_translate_mat);
+  cv::projectPoints(object_points, ext_rodriques_mat, ext_translate_mat,
+                    calib_K, calib_D, image_points);
+}
+
+void Camera::ProjectPoints(const cv::Mat& object_points, double gl[16],
+                           cv::Mat& image_points) const
+{
+  double glm[4][4] = {
+    gl[0], gl[4], gl[8],  gl[12], gl[1], gl[5], gl[9],  gl[13],
+    gl[2], gl[6], gl[10], gl[14], gl[3], gl[7], gl[11], gl[15],
+  };
+  cv::Mat glm_mat = cv::Mat(4, 4, CV_64F, glm);
+
+  // For some reason we need to mirror both y and z ???
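+  // The mirroring answers the question above: OpenGL's camera looks down the
+  // negative z axis with +y up, while OpenCV's camera looks down +z with +y
+  // down. Converting the (column-major) OpenGL modelview matrix into an
+  // OpenCV extrinsic therefore amounts to left-multiplying by
+  // diag(1, -1, -1, 1), which is exactly what cv_mul below is set up to do.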
+  double cv_mul_data[4][4];
+  cv::Mat cv_mul = cv::Mat(4, 4, CV_64F, cv_mul_data);
+  cv::setIdentity(cv_mul);
+  cv_mul.at<double>(1, 1) = -1;
+  cv_mul.at<double>(2, 2) = -1;
+  glm_mat = cv_mul * glm_mat;
+
+  // Rotation
+  Rotation r;
+  r.SetMatrix(glm_mat);
+  double rod[3];
+  cv::Mat rod_mat = cv::Mat(3, 1, CV_64F, rod);
+  r.GetRodriques(rod_mat);
+  // Translation
+  double tra[3] = { glm[0][3], glm[1][3], glm[2][3] };
+  cv::Mat tra_mat = cv::Mat(3, 1, CV_64F, tra);
+  // Project points
+  ProjectPoints(object_points, rod_mat, tra_mat, image_points);
+}
+
+void Camera::ProjectPoint(const cv::Point3d& pw, const Pose* pose,
+                          cv::Point2d& pi) const
+{
+  float object_points_data[3] = { (float)pw.x, (float)pw.y, (float)pw.z };
+  float image_points_data[2] = { 0 };
+  cv::Mat object_points = cv::Mat(1, 1, CV_32FC3, object_points_data);
+  cv::Mat image_points = cv::Mat(1, 1, CV_32FC2, image_points_data);
+  ProjectPoints(object_points, pose, image_points);
+  pi.x = image_points.at<float>(0);
+  pi.y = image_points.at<float>(1);
+}
+
+void Camera::ProjectPoint(const cv::Point3f& pw, const Pose* pose,
+                          cv::Point2f& pi) const
+{
+  float object_points_data[3] = { (float)pw.x, (float)pw.y, (float)pw.z };
+  float image_points_data[2] = { 0 };
+  cv::Mat object_points = cv::Mat(1, 1, CV_32FC3, object_points_data);
+  cv::Mat image_points = cv::Mat(1, 1, CV_32FC2, image_points_data);
+  ProjectPoints(object_points, pose, image_points);
+  pi.x = image_points.at<float>(0);
+  pi.y = image_points.at<float>(1);
+}
+
+Homography::Homography()
+{
+  H = cv::Mat(3, 3, CV_64F);
+}
+
+void Homography::Find(const vector<PointDouble>& pw,
+                      const vector<PointDouble>& pi)
+{
+  assert(pw.size() == pi.size());
+  int size = (int)pi.size();
+
+  cv::Point2d srcp[size];
+  cv::Point2d dstp[size];
+
+  for (int i = 0; i < size; ++i)
+  {
+    srcp[i].x = pw[i].x;
+    srcp[i].y = pw[i].y;
+
+    dstp[i].x = pi[i].x;
+    dstp[i].y = pi[i].y;
+  }
+
+  cv::Mat src_pts, dst_pts;
+  dst_pts = cv::Mat(1, size, CV_64FC2, dstp);
+  src_pts = cv::Mat(1, size, CV_64FC2, srcp);
+
+  cv::Mat tmp = cv::findHomography(src_pts, dst_pts);
+  if (tmp.elemSize() > 0)
+  {
+    H = tmp;
+  }
+}
+
+void Homography::ProjectPoints(const vector<PointDouble>& from,
+                               vector<PointDouble>& to) const
+{
+  int size = (int)from.size();
+
+  cv::Point3d srcp[size];
+
+  for (int i = 0; i < size; ++i)
+  {
+    srcp[i].x = from[i].x;
+    srcp[i].y = from[i].y;
+    srcp[i].z = 1;
+  }
+
+  cv::Point3d dstp[size];
+
+  cv::Mat src_pts, dst_pts;
+  src_pts = cv::Mat(1, size, CV_64FC3, srcp);
+  dst_pts = cv::Mat(1, size, CV_64FC3, dstp);
+
+  if (!H.empty())
+  {
+    cv::transform(src_pts, dst_pts, H);
+
+    to.clear();
+    for (int i = 0; i < size; ++i)
+    {
+      PointDouble pt;
+      pt.x = dstp[i].x / dstp[i].z;
+      pt.y = dstp[i].y / dstp[i].z;
+
+      to.push_back(pt);
+    }
+  }
 }
 
-void Camera::CalcExteriorOrientation(vector<CvPoint3D64f>& pw, vector<CvPoint2D64f>& pi,
-    Pose *pose)
-{
-  double ext_rodriques[3];
-  double ext_translate[3];
-  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
-  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
-  CvMat *object_points = cvCreateMat((int)pw.size(), 1, CV_32FC3);
-  CvMat *image_points = cvCreateMat((int)pi.size(), 1, CV_32FC2);
-  for (size_t i=0; i<pw.size(); i++) {
-    object_points->data.fl[i*3+0] = (float)pw[i].x;
-    object_points->data.fl[i*3+1] = (float)pw[i].y;
-    object_points->data.fl[i*3+2] = (float)pw[i].z;
-    image_points->data.fl[i*2+0] = (float)pi[i].x;
-    image_points->data.fl[i*2+1] = (float)pi[i].y;
-  }
-  //cvFindExtrinsicCameraParams2(object_points, image_points, &calib_K, &calib_D, &ext_rodriques_mat, &ext_translate_mat);
-
-  //cv::sfm::computeOrientation(cv::cvarrToMat(object_points),cv::cvarrToMat(image_points),)
-
-  cv::solvePnP(cv::cvarrToMat(object_points), cv::cvarrToMat(image_points), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), cv::cvarrToMat(&ext_rodriques_mat), cv::cvarrToMat(&ext_translate_mat));
-  pose->SetRodriques(&ext_rodriques_mat);
-  pose->SetTranslation(&ext_translate_mat);
-  cvReleaseMat(&object_points);
-  cvReleaseMat(&image_points);
-}
-
-void Camera::CalcExteriorOrientation(vector<CvPoint3D64f>& pw, vector<CvPoint2D64f>& pi,
-    CvMat *rodriques, CvMat *tra)
-{
-  //assert(pw.size() == pi.size());
-
-  int size = (int)pi.size();
-
-  CvPoint3D64f *world_pts = new CvPoint3D64f[size];
-  CvPoint2D64f *image_pts = new CvPoint2D64f[size];
-
-  for(int i = 0; i < size; i++){
-    world_pts[i].x = pw[i].x;
-    world_pts[i].y = pw[i].y;
-    world_pts[i].z = pw[i].z;
-    // flip image points! Why???
-    //image_pts[i].x = x_res - pi[i].x;
-    //image_pts[i].y = y_res - pi[i].y;
-    image_pts[i].x = pi[i].x;
-    image_pts[i].y = pi[i].y;
-  }
-
-  double rot[3]; // rotation vector
-  CvMat world_mat, image_mat, rot_vec;
-  cvInitMatHeader(&world_mat, size, 1, CV_64FC3, world_pts);
-  cvInitMatHeader(&image_mat, size, 1, CV_64FC2, image_pts);
-  cvInitMatHeader(&rot_vec, 3, 1, CV_64FC1, rot);
-
-  cvZero(tra);
-  //cvmodFindExtrinsicCameraParams2(&world_mat, &image_mat, &calib_K, &calib_D, rodriques, tra, error);
-  //cv::cvFindExtrinsicCameraParams2(&world_mat, &image_mat, &calib_K, &calib_D, rodriques, tra);
-
-  cv::Mat rodout = cv::cvarrToMat(rodriques);
-  cv::Mat traout = cv::cvarrToMat(tra);
-
-  cv::solvePnP(cv::cvarrToMat(&world_mat), cv::cvarrToMat(&image_mat), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D),rodout,traout);
-  *rodriques = cvMat(rodout);
-  *tra = cvMat(traout);
-
-  delete[] world_pts;
-  delete[] image_pts;
-}
-
-void Camera::CalcExteriorOrientation(vector<PointDouble >& pw, vector<PointDouble >& pi,
-    CvMat *rodriques, CvMat *tra)
-{
-  //assert(pw.size() == pi.size());
-  int size = (int)pi.size();
-
-  vector<CvPoint3D64f> pw3;
-  pw3.resize(size);
-  for (int i=0; i<size; i++) {
-    pw3[i].x = pw[i].x;
-    pw3[i].y = pw[i].y;
-    pw3[i].z = 0;
-  }
-
-  CalcExteriorOrientation(pw3, pi, rodriques, tra);
-}
-
-void Camera::CalcExteriorOrientation(vector<PointDouble >& pw, vector<PointDouble >& pi, Pose *pose)
-{
-  double ext_rodriques[3];
-  double ext_translate[3];
-  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
-  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
-  CalcExteriorOrientation(pw, pi, &ext_rodriques_mat, &ext_translate_mat);
-  pose->SetRodriques(&ext_rodriques_mat);
-  pose->SetTranslation(&ext_translate_mat);
-}
-
-bool Camera::CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, CvMat *rodriques, CvMat *tra) {
-  cv::Mat rodout = cv::cvarrToMat(rodriques);
-  cv::Mat traout = cv::cvarrToMat(tra);
-  cv::solvePnP(cv::cvarrToMat(object_points), cv::cvarrToMat(image_points), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), rodout, traout);
-  *rodriques = cvMat(rodout);
-  *tra = cvMat(traout);
-  return true;
-}
-
-bool Camera::CalcExteriorOrientation(const CvMat* object_points, CvMat* image_points, Pose *pose) {
-  double ext_rodriques[3];
-  double ext_translate[3];
-  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
-  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
-  bool ret = CalcExteriorOrientation(object_points, image_points, &ext_rodriques_mat, &ext_translate_mat);
-  pose->SetRodriques(&ext_rodriques_mat);
-  pose->SetTranslation(&ext_translate_mat);
-  return ret;
-}
-
-void Camera::ProjectPoints(vector<CvPoint3D64f>& pw, Pose *pose, vector<CvPoint2D64f>& pi) const {
-  double ext_rodriques[3];
-  double ext_translate[3];
-  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
-  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
-  pose->GetRodriques(&ext_rodriques_mat);
-  pose->GetTranslation(&ext_translate_mat);
-  CvMat *object_points = cvCreateMat((int)pw.size(), 1, CV_32FC3);
-  CvMat *image_points = cvCreateMat((int)pi.size(), 1, CV_32FC2);
-  for (size_t i=0; i<pw.size(); i++) {
-    object_points->data.fl[i*3+0] = (float)pw[i].x;
-    object_points->data.fl[i*3+1] = (float)pw[i].y;
-    object_points->data.fl[i*3+2] = (float)pw[i].z;
-  }
-  cv::projectPoints(cv::cvarrToMat(object_points), cv::cvarrToMat(&ext_rodriques_mat), cv::cvarrToMat(&ext_translate_mat), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), cv::cvarrToMat(image_points));
-  for (size_t i=0; i<pw.size(); i++) {
-    pi[i].x = image_points->data.fl[i*2+0];
-    pi[i].y = image_points->data.fl[i*2+1];
-  }
-  cvReleaseMat(&object_points);
-  cvReleaseMat(&image_points);
-}
-
-void Camera::ProjectPoints(const CvMat* object_points, const CvMat* rotation_vector,
-               const CvMat* translation_vector, CvMat* image_points) const
-{
-  // Project points
-  cv::projectPoints(cv::cvarrToMat(object_points), cv::cvarrToMat(rotation_vector), cv::cvarrToMat(translation_vector), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), cv::cvarrToMat(image_points));
-}
-
-void Camera::ProjectPoints(const CvMat* object_points, const Pose* pose, CvMat* image_points) const
-{
-  double ext_rodriques[3];
-  double ext_translate[3];
-  CvMat ext_rodriques_mat = cvMat(3, 1, CV_64F, ext_rodriques);
-  CvMat ext_translate_mat = cvMat(3, 1, CV_64F, ext_translate);
-  pose->GetRodriques(&ext_rodriques_mat);
-  pose->GetTranslation(&ext_translate_mat);
-  cv::projectPoints(cv::cvarrToMat(object_points), cv::cvarrToMat(&ext_rodriques_mat), cv::cvarrToMat(&ext_translate_mat), cv::cvarrToMat(&calib_K), cv::cvarrToMat(&calib_D), cv::cvarrToMat(image_points));
-}
-
-void Camera::ProjectPoints(const CvMat* object_points, double gl[16], CvMat* image_points) const
-{
-  double glm[4][4] = {
-    gl[0], gl[4], gl[8],  gl[12],
-    gl[1], gl[5], gl[9],  gl[13],
-    gl[2], gl[6], gl[10], gl[14],
-    gl[3], gl[7], gl[11], gl[15],
-  };
-  CvMat glm_mat = cvMat(4, 4, CV_64F, glm);
-
-  // For some reason we need to mirror both y and z ???
-  double cv_mul_data[4][4];
-  CvMat cv_mul = cvMat(4, 4, CV_64F, cv_mul_data);
-  cvSetIdentity(&cv_mul);
-  cvmSet(&cv_mul, 1, 1, -1);
-  cvmSet(&cv_mul, 2, 2, -1);
-  cvMatMul(&cv_mul, &glm_mat, &glm_mat);
-
-  // Rotation
-  Rotation r;
-  r.SetMatrix(&glm_mat);
-  double rod[3];
-  CvMat rod_mat=cvMat(3, 1, CV_64F, rod);
-  r.GetRodriques(&rod_mat);
-  // Translation
-  double tra[3] = { glm[0][3], glm[1][3], glm[2][3] };
-  CvMat tra_mat = cvMat(3, 1, CV_64F, tra);
-  // Project points
-  ProjectPoints(object_points, &rod_mat, &tra_mat, image_points);
-}
-
-void Camera::ProjectPoint(const cv::Point3f pw, const Pose *pose, cv::Point2f &pi) const {
-  float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z};
-  float image_points_data[2] = {0};
-  CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data);
-  CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data);
-  ProjectPoints(&object_points, pose, &image_points);
-  pi.x = image_points.data.fl[0];
-  pi.y = image_points.data.fl[1];
-}
-
-// void Camera::ProjectPoint(const cv::Point3f pw, const Pose *pose, cv::Point2f &pi) const {
-//   float object_points_data[3] = {(float)pw.x, (float)pw.y, (float)pw.z};
-//   float image_points_data[2] = {0};
-//   CvMat object_points = cvMat(1, 1, CV_32FC3, object_points_data);
-//   CvMat image_points = cvMat(1, 1, CV_32FC2, image_points_data);
-//   ProjectPoints(&object_points, pose, &image_points);
-//   pi.x = image_points.data.fl[0];
-//   pi.y = image_points.data.fl[1];
-// }
-}
-
-alvar::Homography::Homography() {
-  cvInitMatHeader(&H, 3, 3, CV_64F, H_data);
-}
-
-void alvar::Homography::Find(const vector<PointDouble >& pw, const vector<PointDouble >& pi)
-{
-  assert(pw.size() == pi.size());
-  int size = (int)pi.size();
-
-  CvPoint2D64f *srcp = new CvPoint2D64f[size];
-  CvPoint2D64f *dstp = new CvPoint2D64f[size];
-
-  for(int i = 0; i < size; ++i){
-    srcp[i].x = pw[i].x;
-    srcp[i].y = pw[i].y;
-
-    dstp[i].x = pi[i].x;
-    dstp[i].y = pi[i].y;
-  }
-
-  CvMat src_pts, dst_pts;
-  cvInitMatHeader(&dst_pts, 1, size, CV_64FC2, dstp);
-  cvInitMatHeader(&src_pts, 1, size, CV_64FC2, srcp);
-
-  cv::findHomography(cv::cvarrToMat(&src_pts), cv::cvarrToMat(&dst_pts), cv::cvarrToMat(&H));
-
-  delete[] srcp;
-  delete[] dstp;
-}
-
-void alvar::Homography::ProjectPoints(const vector< alvar::PointDouble>& from, vector< alvar::PointDouble>& to)
-{
-  int size
= (int)from.size(); - - CvPoint3D64f *srcp = new CvPoint3D64f[size]; - - for(int i = 0; i < size; ++i){ - srcp[i].x = from[i].x; - srcp[i].y = from[i].y; - srcp[i].z = 1; - } - - CvPoint3D64f *dstp = new CvPoint3D64f[size]; - - CvMat src_pts, dst_pts; - cvInitMatHeader(&src_pts, 1, size, CV_64FC3, srcp); - cvInitMatHeader(&dst_pts, 1, size, CV_64FC3, dstp); - - cvTransform(&src_pts, &dst_pts, &H); - - to.clear(); - for(int i = 0; i < size; ++i) - { - alvar::PointDouble pt; - pt.x = dstp[i].x / dstp[i].z; - pt.y = dstp[i].y / dstp[i].z; - - to.push_back(pt); - } - - delete[] srcp; - delete[] dstp; -} - -// } // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/CaptureDevice.cpp b/ar_track_alvar/src/CaptureDevice.cpp index 4c006e1..ce5a0d6 100644 --- a/ar_track_alvar/src/CaptureDevice.cpp +++ b/ar_track_alvar/src/CaptureDevice.cpp @@ -25,12 +25,12 @@ #include -namespace alvar { - -CaptureDevice::CaptureDevice(const std::string captureType, const std::string id, const std::string description) - : mCaptureType(captureType) - , mId(id) - , mDescription(description) +namespace alvar +{ +CaptureDevice::CaptureDevice(const std::string captureType, + const std::string id, + const std::string description) + : mCaptureType(captureType), mId(id), mDescription(description) { } @@ -40,24 +40,24 @@ CaptureDevice::~CaptureDevice() std::string CaptureDevice::captureType() const { - return mCaptureType; + return mCaptureType; } std::string CaptureDevice::id() const { - return mId; + return mId; } std::string CaptureDevice::description() const { - return mDescription; + return mDescription; } std::string CaptureDevice::uniqueName() const { - std::stringstream name; - name << captureType() << "_" << id(); - return name.str(); + std::stringstream name; + name << captureType() << "_" << id(); + return name.str(); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/CaptureFactory.cpp b/ar_track_alvar/src/CaptureFactory.cpp index 87105c6..ff73447 100644 --- a/ar_track_alvar/src/CaptureFactory.cpp +++ b/ar_track_alvar/src/CaptureFactory.cpp @@ -27,244 +27,281 @@ #include "ar_track_alvar/CapturePlugin.h" #include "ar_track_alvar/DirectoryIterator.h" -namespace alvar { - +namespace alvar +{ CaptureFactoryPrivate::CaptureFactoryPrivate() - : mPluginPaths() - , mPluginPrefix() - , mPluginPostfix() - , mLoadedAllPlugins(false) - , mPluginMap() - , mCapturePluginMap() + : mPluginPaths() + , mPluginPrefix() + , mPluginPostfix() + , mLoadedAllPlugins(false) + , mPluginMap() + , mCapturePluginMap() { - setupPluginPaths(); + setupPluginPaths(); - mPluginPrefix = pluginPrefix(); - mPluginPrefix.append("alvarcaptureplugin"); + mPluginPrefix = pluginPrefix(); + mPluginPrefix.append("alvarcaptureplugin"); - mPluginPostfix.append(ALVAR_VERSION_NODOTS); - #if _DEBUG - mPluginPostfix.append("d"); - #endif - mPluginPostfix.append("."); - mPluginPostfix.append(pluginExtension()); + mPluginPostfix.append(ALVAR_VERSION_NODOTS); +#if _DEBUG + mPluginPostfix.append("d"); +#endif + mPluginPostfix.append("."); + mPluginPostfix.append(pluginExtension()); } CaptureFactoryPrivate::~CaptureFactoryPrivate() { - for (CapturePluginMap::iterator itr = mCapturePluginMap.begin(); itr != mCapturePluginMap.end(); itr++) { - delete itr->second; - } - mCapturePluginMap.clear(); - mPluginMap.clear(); + for (CapturePluginMap::iterator itr = mCapturePluginMap.begin(); + itr != mCapturePluginMap.end(); itr++) + { + delete itr->second; + } + mCapturePluginMap.clear(); + mPluginMap.clear(); } void 
CaptureFactoryPrivate::loadPlugins() { - // ensure that plugins have not already been loaded - if (mLoadedAllPlugins) { - return; + // ensure that plugins have not already been loaded + if (mLoadedAllPlugins) + { + return; + } + + // iterate over search paths + for (PluginPathsVector::iterator itr = mPluginPaths.begin(); + itr != mPluginPaths.end(); ++itr) + { + DirectoryIterator directory(*itr); + + // iterate over entries in current path + while (directory.hasNext()) + { + std::string entry = directory.next(); + + // verify that filename matches the plugin convention + int prefixIndex = entry.find(mPluginPrefix); + int postfixIndex = entry.rfind(mPluginPostfix); + if (prefixIndex == -1 || postfixIndex == -1) + { + continue; + } + + // load the actual plugin + entry = entry.substr(mPluginPrefix.size(), + postfixIndex - mPluginPrefix.size()); + loadPlugin(entry, directory.currentPath()); } + } - // iterate over search paths - for (PluginPathsVector::iterator itr = mPluginPaths.begin(); itr != mPluginPaths.end(); ++itr) { - DirectoryIterator directory(*itr); - - // iterate over entries in current path - while (directory.hasNext()) { - std::string entry = directory.next(); - - // verify that filename matches the plugin convention - int prefixIndex = entry.find(mPluginPrefix); - int postfixIndex = entry.rfind(mPluginPostfix); - if (prefixIndex == -1 || postfixIndex == -1) { - continue; - } - - // load the actual plugin - entry = entry.substr(mPluginPrefix.size(), postfixIndex - mPluginPrefix.size()); - loadPlugin(entry, directory.currentPath()); - } - } - - // this should only be done once - mLoadedAllPlugins = true; + // this should only be done once + mLoadedAllPlugins = true; } -void CaptureFactoryPrivate::loadPlugin(const std::string &captureType) +void CaptureFactoryPrivate::loadPlugin(const std::string& captureType) { - // ensure plugin is not alredy loaded - if (mPluginMap.find(captureType) != mPluginMap.end()) { - return; - } - - // iterate over search paths - for (PluginPathsVector::iterator itr = mPluginPaths.begin(); itr != mPluginPaths.end(); ++itr) { - DirectoryIterator directory(*itr); - - // iterate over entries in current path - while (directory.hasNext()) { - std::string entry = directory.next(); - - // verify that filename matches the plugin convention - int prefixIndex = entry.find(mPluginPrefix); - int postfixIndex = entry.rfind(mPluginPostfix); - if (prefixIndex == -1 || postfixIndex == -1) { - continue; - } - - // verify that filename matches capture type - entry = entry.substr(mPluginPrefix.size(), postfixIndex - mPluginPrefix.size()); - if (entry != captureType) { - continue; - } - - // load the actual plugin - loadPlugin(entry, directory.currentPath()); - - // stop searching - break; - } + // ensure plugin is not alredy loaded + if (mPluginMap.find(captureType) != mPluginMap.end()) + { + return; + } + + // iterate over search paths + for (PluginPathsVector::iterator itr = mPluginPaths.begin(); + itr != mPluginPaths.end(); ++itr) + { + DirectoryIterator directory(*itr); + + // iterate over entries in current path + while (directory.hasNext()) + { + std::string entry = directory.next(); + + // verify that filename matches the plugin convention + int prefixIndex = entry.find(mPluginPrefix); + int postfixIndex = entry.rfind(mPluginPostfix); + if (prefixIndex == -1 || postfixIndex == -1) + { + continue; + } + + // verify that filename matches capture type + entry = entry.substr(mPluginPrefix.size(), + postfixIndex - mPluginPrefix.size()); + if (entry != captureType) + { + 
continue; + } + + // load the actual plugin + loadPlugin(entry, directory.currentPath()); + + // stop searching + break; } + } } -void CaptureFactoryPrivate::loadPlugin(const std::string &captureType, const std::string &filename) +void CaptureFactoryPrivate::loadPlugin(const std::string& captureType, + const std::string& filename) { - // ensure plugin is not alredy loaded - if (mPluginMap.find(captureType) != mPluginMap.end()) { - return; + // ensure plugin is not alredy loaded + if (mPluginMap.find(captureType) != mPluginMap.end()) + { + return; + } + + try + { + // create and load the plugin + Plugin plugin(filename); + + // register the plugin + // for this to work, each plugin must export the following method + // extern "C" __declspec(dllexport) void registerPlugin(const std::string + // &captureType, alvar::CapturePlugin *capturePlugin); + // which creates a new CapturePlugin object: capturePlugin = new + // MyCapturePlugin(captureType); + typedef void (*RegisterPlugin)(const std::string& captureType, + CapturePlugin*& capturePlugin); + RegisterPlugin registerPlugin = (RegisterPlugin)plugin.resolve("registerPlu" + "gin"); + CapturePlugin* capturePlugin = NULL; + if (registerPlugin) + { + registerPlugin(captureType, capturePlugin); } - try { - // create and load the plugin - Plugin plugin(filename); - - // register the plugin - // for this to work, each plugin must export the following method - // extern "C" __declspec(dllexport) void registerPlugin(const std::string &captureType, alvar::CapturePlugin *capturePlugin); - // which creates a new CapturePlugin object: capturePlugin = new MyCapturePlugin(captureType); - typedef void (*RegisterPlugin)(const std::string &captureType, CapturePlugin *&capturePlugin); - RegisterPlugin registerPlugin = (RegisterPlugin)plugin.resolve("registerPlugin"); - CapturePlugin *capturePlugin = NULL; - if (registerPlugin) { - registerPlugin(captureType, capturePlugin); - } - - // return if plugin did not create it's capture plugin - if (capturePlugin == NULL) { - return; - } - - // insert the plugin and capture plugin into maps - mPluginMap.insert(PluginMap::value_type(captureType, plugin)); - mCapturePluginMap.insert(CapturePluginMap::value_type(captureType, capturePlugin)); - } - catch (AlvarException e) { - // if anything fails, simply ignore it... - #if defined(_DEBUG) || !defined(NDEBUG) - std::cout << e.what() << std::endl; - #endif + // return if plugin did not create it's capture plugin + if (capturePlugin == NULL) + { + return; } + + // insert the plugin and capture plugin into maps + mPluginMap.insert(PluginMap::value_type(captureType, plugin)); + mCapturePluginMap.insert( + CapturePluginMap::value_type(captureType, capturePlugin)); + } + catch (AlvarException e) + { +// if anything fails, simply ignore it... 
+#if defined(_DEBUG) || !defined(NDEBUG) + std::cout << e.what() << std::endl; +#endif + } } -CapturePlugin *CaptureFactoryPrivate::getPlugin(const std::string &captureType) +CapturePlugin* CaptureFactoryPrivate::getPlugin(const std::string& captureType) { - // find CapturePlugin implementation according to capture type - CapturePluginMap::iterator itr; + // find CapturePlugin implementation according to capture type + CapturePluginMap::iterator itr; + itr = mCapturePluginMap.find(captureType); + + // if not found, attempt to load plugin + if (itr == mCapturePluginMap.end()) + { + loadPlugin(captureType); itr = mCapturePluginMap.find(captureType); - - // if not found, attempt to load plugin - if (itr == mCapturePluginMap.end()) { - loadPlugin(captureType); - itr = mCapturePluginMap.find(captureType); - } - - // return CapturePlugin implementation if found - CapturePlugin *capturePlugin = NULL; - if (itr != mCapturePluginMap.end()) { - capturePlugin = itr->second; - } - return capturePlugin; + } + + // return CapturePlugin implementation if found + CapturePlugin* capturePlugin = NULL; + if (itr != mCapturePluginMap.end()) + { + capturePlugin = itr->second; + } + return capturePlugin; } - // static class variables instantiation for singleton implementation -CaptureFactory *CaptureFactory::mInstance = NULL; +CaptureFactory* CaptureFactory::mInstance = NULL; Mutex CaptureFactory::mMutex; CaptureFactory::CaptureFactoryDestroyer CaptureFactory::mDestroyer; -CaptureFactory *CaptureFactory::instance() +CaptureFactory* CaptureFactory::instance() { - // do not use double-checked locking - // http://www.aristeia.com/Papers/DDJ_Jul_Aug_2004_revised.pdf - // use a destroyer class to properly clean up resources - // http://www.research.ibm.com/designpatterns/pubs/ph-jun96.txt - Lock lock(&mMutex); - if (mInstance == NULL) { - mInstance = new CaptureFactory(); - mDestroyer.set(mInstance); - } - return mInstance; + // do not use double-checked locking + // http://www.aristeia.com/Papers/DDJ_Jul_Aug_2004_revised.pdf + // use a destroyer class to properly clean up resources + // http://www.research.ibm.com/designpatterns/pubs/ph-jun96.txt + Lock lock(&mMutex); + if (mInstance == NULL) + { + mInstance = new CaptureFactory(); + mDestroyer.set(mInstance); + } + return mInstance; } -CaptureFactory::CaptureFactory() - : d(new CaptureFactoryPrivate()) +CaptureFactory::CaptureFactory() : d(new CaptureFactoryPrivate()) { } CaptureFactory::~CaptureFactory() { - delete d; + delete d; } CaptureFactory::CapturePluginVector CaptureFactory::enumeratePlugins() { - // load all plugins - d->loadPlugins(); - - // return the available plugins as a vector of plugin names - CaptureFactory::CapturePluginVector keys; - for (CaptureFactoryPrivate::PluginMap::iterator itr = d->mPluginMap.begin(); itr != d->mPluginMap.end(); ++itr) { - keys.push_back(itr->first); - } - - return keys; + // load all plugins + d->loadPlugins(); + + // return the available plugins as a vector of plugin names + CaptureFactory::CapturePluginVector keys; + for (CaptureFactoryPrivate::PluginMap::iterator itr = d->mPluginMap.begin(); + itr != d->mPluginMap.end(); ++itr) + { + keys.push_back(itr->first); + } + + return keys; } -CaptureFactory::CaptureDeviceVector CaptureFactory::enumerateDevices(const std::string &captureType) +CaptureFactory::CaptureDeviceVector +CaptureFactory::enumerateDevices(const std::string& captureType) { - CaptureDeviceVector devices; - - // load all plugins and enumerate their devices - if (captureType.empty()) { - d->loadPlugins(); 
- for (CaptureFactoryPrivate::CapturePluginMap::iterator itr = d->mCapturePluginMap.begin(); itr != d->mCapturePluginMap.end(); ++itr) { - CaptureDeviceVector pluginDevices = itr->second->enumerateDevices(); - devices.insert(devices.end(), pluginDevices.begin(), pluginDevices.end()); - } + CaptureDeviceVector devices; + + // load all plugins and enumerate their devices + if (captureType.empty()) + { + d->loadPlugins(); + for (CaptureFactoryPrivate::CapturePluginMap::iterator itr = + d->mCapturePluginMap.begin(); + itr != d->mCapturePluginMap.end(); ++itr) + { + CaptureDeviceVector pluginDevices = itr->second->enumerateDevices(); + devices.insert(devices.end(), pluginDevices.begin(), pluginDevices.end()); } - // only enumerate the devices of one plugin - else { - CapturePlugin *capturePlugin = d->getPlugin(captureType); - if (capturePlugin) { - devices = capturePlugin->enumerateDevices(); - } + } + // only enumerate the devices of one plugin + else + { + CapturePlugin* capturePlugin = d->getPlugin(captureType); + if (capturePlugin) + { + devices = capturePlugin->enumerateDevices(); } + } - return devices; + return devices; } -Capture *CaptureFactory::createCapture(const CaptureDevice captureDevice) +Capture* CaptureFactory::createCapture(const CaptureDevice captureDevice) { - // get CapturePlugin implementation - CapturePlugin *capturePlugin = d->getPlugin(captureDevice.captureType()); - - // create Capture implementation and return - Capture *capture = NULL; - if (capturePlugin) { - capture = capturePlugin->createCapture(captureDevice); - } - return capture; + // get CapturePlugin implementation + CapturePlugin* capturePlugin = d->getPlugin(captureDevice.captureType()); + + // create Capture implementation and return + Capture* capture = NULL; + if (capturePlugin) + { + capture = capturePlugin->createCapture(captureDevice); + } + return capture; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/CaptureFactory_unix.cpp b/ar_track_alvar/src/CaptureFactory_unix.cpp index 4b0b1ee..e376d82 100644 --- a/ar_track_alvar/src/CaptureFactory_unix.cpp +++ b/ar_track_alvar/src/CaptureFactory_unix.cpp @@ -24,69 +24,78 @@ #include "ar_track_alvar/CaptureFactory_private.h" #include -#include // for readlink() - -namespace alvar { +#include // for readlink() +namespace alvar +{ void CaptureFactoryPrivate::setupPluginPaths() { - // application path and default plugin path - const int bufferSize = 4096; - char applicationBuffer[bufferSize]; - int count = readlink("/proc/self/exe", applicationBuffer, bufferSize); - if (count != 0 && count < bufferSize) { - std::string applicationPath(applicationBuffer, count); - applicationPath = std::string(applicationPath, 0, applicationPath.find_last_of("/")); - mPluginPaths.push_back(applicationPath); - mPluginPaths.push_back(applicationPath + "/alvarplugins"); - } - - // ALVAR library path - parseEnvironmentVariable(std::string("ALVAR_LIBRARY_PATH")); + // application path and default plugin path + const int bufferSize = 4096; + char applicationBuffer[bufferSize]; + int count = readlink("/proc/self/exe", applicationBuffer, bufferSize); + if (count != 0 && count < bufferSize) + { + std::string applicationPath(applicationBuffer, count); + applicationPath = + std::string(applicationPath, 0, applicationPath.find_last_of("/")); + mPluginPaths.push_back(applicationPath); + mPluginPaths.push_back(applicationPath + "/alvarplugins"); + } - // ALVAR plugin path - parseEnvironmentVariable(std::string("ALVAR_PLUGIN_PATH")); + // ALVAR library path + 
parseEnvironmentVariable(std::string("ALVAR_LIBRARY_PATH")); + + // ALVAR plugin path + parseEnvironmentVariable(std::string("ALVAR_PLUGIN_PATH")); } -void CaptureFactoryPrivate::parseEnvironmentVariable(const std::string &variable) +void CaptureFactoryPrivate::parseEnvironmentVariable( + const std::string& variable) { - // acquire environment variable - char *buffer; - std::string path(""); - buffer = getenv(variable.data()); - if (buffer) { - path = std::string(buffer); + // acquire environment variable + char* buffer; + std::string path(""); + buffer = getenv(variable.data()); + if (buffer) + { + path = std::string(buffer); + } + + // tokenize paths + char delimitor = ':'; + if (!path.empty()) + { + std::string::size_type start = 0; + std::string::size_type end = 0; + while ((end = path.find_first_of(delimitor, start)) != std::string::npos) + { + std::string tmp(path, start, end - start); + if (!tmp.empty()) + { + mPluginPaths.push_back(tmp); + } + start = end + 1; } - - // tokenize paths - char delimitor = ':'; - if (!path.empty()) { - std::string::size_type start = 0; - std::string::size_type end = 0; - while ((end = path.find_first_of(delimitor, start)) != std::string::npos) { - std::string tmp(path, start, end - start); - if (!tmp.empty()) { - mPluginPaths.push_back(tmp); - } - start = end + 1; - } - if (start != path.size()) { - std::string tmp(path, start, std::string::npos); - if (!tmp.empty()) { - mPluginPaths.push_back(tmp); - } - } + if (start != path.size()) + { + std::string tmp(path, start, std::string::npos); + if (!tmp.empty()) + { + mPluginPaths.push_back(tmp); + } } + } } std::string CaptureFactoryPrivate::pluginPrefix() { - return std::string("lib"); + return std::string("lib"); } std::string CaptureFactoryPrivate::pluginExtension() { - return std::string("so"); + return std::string("so"); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/CaptureFactory_win.cpp b/ar_track_alvar/src/CaptureFactory_win.cpp index dddce22..fd99436 100644 --- a/ar_track_alvar/src/CaptureFactory_win.cpp +++ b/ar_track_alvar/src/CaptureFactory_win.cpp @@ -25,78 +25,88 @@ #include -namespace alvar { - +namespace alvar +{ void CaptureFactoryPrivate::setupPluginPaths() { - // application path and default plugin path - const DWORD bufferSize = 4096; - char applicationBuffer[bufferSize]; - DWORD count = GetModuleFileName(NULL, applicationBuffer, bufferSize); - if (count != 0 && count < bufferSize) { - std::string applicationPath(applicationBuffer, count); - applicationPath = std::string(applicationPath, 0, applicationPath.find_last_of("\\")); - mPluginPaths.push_back(applicationPath); - mPluginPaths.push_back(applicationPath + "\\alvarplugins"); - } + // application path and default plugin path + const DWORD bufferSize = 4096; + char applicationBuffer[bufferSize]; + DWORD count = GetModuleFileName(NULL, applicationBuffer, bufferSize); + if (count != 0 && count < bufferSize) + { + std::string applicationPath(applicationBuffer, count); + applicationPath = + std::string(applicationPath, 0, applicationPath.find_last_of("\\")); + mPluginPaths.push_back(applicationPath); + mPluginPaths.push_back(applicationPath + "\\alvarplugins"); + } - // ALVAR library path - parseEnvironmentVariable(std::string("ALVAR_LIBRARY_PATH")); + // ALVAR library path + parseEnvironmentVariable(std::string("ALVAR_LIBRARY_PATH")); - // ALVAR plugin path - parseEnvironmentVariable(std::string("ALVAR_PLUGIN_PATH")); + // ALVAR plugin path + parseEnvironmentVariable(std::string("ALVAR_PLUGIN_PATH")); } 
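// Illustrative sketch (not part of the patch): a minimal plugin satisfying the
// registerPlugin() convention that CaptureFactoryPrivate::loadPlugin() resolves
// above. The class name MyCapturePlugin and its trivial method bodies are
// hypothetical, and the base-class constructor and CaptureDeviceVector typedef
// are assumed from how the factory code uses them; the exported symbol name,
// its signature, and the "new MyCapturePlugin(captureType)" pattern come from
// the comments in loadPlugin() itself.

#include "ar_track_alvar/CapturePlugin.h"

#include <string>

class MyCapturePlugin : public alvar::CapturePlugin
{
public:
  explicit MyCapturePlugin(const std::string& captureType)
    : CapturePlugin(captureType)
  {
  }

  // Report no devices; a real plugin would probe its backend here.
  CaptureDeviceVector enumerateDevices()
  {
    return CaptureDeviceVector();
  }

  // A real plugin would return a backend-specific Capture implementation.
  alvar::Capture* createCapture(const alvar::CaptureDevice captureDevice)
  {
    return NULL;
  }
};

// The factory passes in a null CapturePlugin*& and expects the plugin to fill
// it in; on Windows the declaration additionally carries __declspec(dllexport).
extern "C" void registerPlugin(const std::string& captureType,
                               alvar::CapturePlugin*& capturePlugin)
{
  capturePlugin = new MyCapturePlugin(captureType);
}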
-void CaptureFactoryPrivate::parseEnvironmentVariable(const std::string &variable)
+void CaptureFactoryPrivate::parseEnvironmentVariable(
+    const std::string& variable)
 {
-    // acquire environment variable
-    char *buffer;
-    std::string path("");
-    #if defined(_MSC_VER) && (_MSC_VER < 1400)
-        buffer = getenv(variable.data());
-        if (buffer) {
-            path = std::string(buffer);
-        }
-    #else
-        size_t requiredSize;
-        getenv_s(&requiredSize, NULL, 0, variable.data());
-        if (requiredSize > 0) {
-            buffer = (char *)malloc(requiredSize * sizeof(char));
-            getenv_s(&requiredSize, buffer, requiredSize, variable.data());
-            path = std::string(buffer, requiredSize - 1);
-            free(buffer);
-        }
-    #endif
+  // acquire environment variable
+  char* buffer;
+  std::string path("");
+#if defined(_MSC_VER) && (_MSC_VER < 1400)
+  buffer = getenv(variable.data());
+  if (buffer)
+  {
+    path = std::string(buffer);
+  }
+#else
+  size_t requiredSize;
+  getenv_s(&requiredSize, NULL, 0, variable.data());
+  if (requiredSize > 0)
+  {
+    buffer = (char*)malloc(requiredSize * sizeof(char));
+    getenv_s(&requiredSize, buffer, requiredSize, variable.data());
+    path = std::string(buffer, requiredSize - 1);
+    free(buffer);
+  }
+#endif
 
-    // tokenize paths
-    char delimitor = ';';
-    if (!path.empty()) {
-        std::string::size_type start = 0;
-        std::string::size_type end = 0;
-        while ((end = path.find_first_of(delimitor, start)) != std::string::npos) {
-            std::string tmp(path, start, end - start);
-            if (!tmp.empty()) {
-                mPluginPaths.push_back(tmp);
-            }
-            start = end + 1;
-        }
-        if (start != path.size()) {
-            std::string tmp(path, start, std::string::npos);
-            if (!tmp.empty()) {
-                mPluginPaths.push_back(tmp);
-            }
-        }
+  // tokenize paths
+  char delimitor = ';';
+  if (!path.empty())
+  {
+    std::string::size_type start = 0;
+    std::string::size_type end = 0;
+    while ((end = path.find_first_of(delimitor, start)) != std::string::npos)
+    {
+      std::string tmp(path, start, end - start);
+      if (!tmp.empty())
+      {
+        mPluginPaths.push_back(tmp);
+      }
+      start = end + 1;
+    }
+    if (start != path.size())
+    {
+      std::string tmp(path, start, std::string::npos);
+      if (!tmp.empty())
+      {
+        mPluginPaths.push_back(tmp);
+      }
     }
+  }
 }
 
 std::string CaptureFactoryPrivate::pluginPrefix()
 {
-    return std::string("");
+  return std::string("");
 }
 
 std::string CaptureFactoryPrivate::pluginExtension()
 {
-    return std::string("dll");
+  return std::string("dll");
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/ConnectedComponents.cpp b/ar_track_alvar/src/ConnectedComponents.cpp
index 70daeab..fd68e55 100644
--- a/ar_track_alvar/src/ConnectedComponents.cpp
+++ b/ar_track_alvar/src/ConnectedComponents.cpp
@@ -40,6 +40,8 @@ Labeling::Labeling()
 
 Labeling::~Labeling()
 {
+  gray.release();
+  bw.release();
 }
 
 bool Labeling::CheckBorder(const std::vector<cv::Point>& contour, int width,
@@ -443,4 +445,4 @@ void FitLineGray(cv::Mat& line_data, float params[4], cv::Mat& gray)
 #endif
 }
 
-}  // namespace alvar
\ No newline at end of file
+}  // namespace alvar
diff --git a/ar_track_alvar/src/CvTestbed.cpp b/ar_track_alvar/src/CvTestbed.cpp
index 700a2aa..165f63c 100644
--- a/ar_track_alvar/src/CvTestbed.cpp
+++ b/ar_track_alvar/src/CvTestbed.cpp
@@ -1,192 +1,244 @@
 #include "ar_track_alvar/CvTestbed.h"
 
-CvTestbed::CvTestbed() {
-    cap=NULL;
-    running=false;
-    videocallback=NULL;
-    keycallback=NULL;
-    images.clear();
-}
+CvTestbed::CvTestbed()
+{
+  cap = NULL;
+  running = false;
+  videocallback = NULL;
+  keycallback = NULL;
+  images.clear();
+}
 
-CvTestbed::~CvTestbed() {
-    // for (size_t i=0; i<images.size(); i++) {
-    //     if (images[i].release_at_exit) {
-    //         cvReleaseImage(&(images[i].ipl));
-    //     }
-    // }
-    images.clear();
-}
+CvTestbed::~CvTestbed()
+{
+  for (size_t i = 0; i < images.size(); i++)
+  {
+    if (images[i].release_at_exit)
+    {
+      images[i].ipl.release();
+    }
+  }
+  images.clear();
+}
 
-void CvTestbed::default_videocallback(cv::Mat *image) {
-    // TODO: Skip frames if we are too slow? Impossible using OpenCV?
-    /*
-    static bool semaphore=false;
-    if (semaphore) return;
-    semaphore = true;
-    */
-
-    if (CvTestbed::Instance().videocallback) {
-        CvTestbed::Instance().videocallback(image);
-    }
-    CvTestbed::Instance().ShowVisibleImages();
-
-    // semaphore = false;
-}
+void CvTestbed::default_videocallback(cv::Mat& image)
+{
+  // TODO: Skip frames if we are too slow? Impossible using OpenCV?
+  /*
+  static bool semaphore=false;
+  if (semaphore) return;
+  semaphore = true;
+  */
+
+  if (CvTestbed::Instance().videocallback)
+  {
+    CvTestbed::Instance().videocallback(image);
+  }
+  CvTestbed::Instance().ShowVisibleImages();
+
+  // semaphore = false;
+}
 
-void CvTestbed::WaitKeys() {
-    running=true;
-    static bool pause=false;
-    while(running) {
-        if (cap) {
-            cv::Mat frame = cv::cvarrToMat(cap->captureImage());
-            if (!frame.empty()) {
-                default_videocallback(&frame);
-                if (wintitle.size() > 0) {
-                    cv::imshow(wintitle.c_str(), frame);
-                }
-            }
-        }
-        int key;
+void CvTestbed::WaitKeys()
+{
+  running = true;
+  static bool pause = false;
+  while (running)
+  {
+    if (cap)
+    {
+      cv::Mat& frame = cap->captureImage();
+      if (!frame.empty())
+      {
+        default_videocallback(frame);
+        if (wintitle.size() > 0)
+        {
+          cv::imshow(wintitle, frame);
+        }
+      }
+    }
+    int key;
 #ifdef WIN32
-        if( (key = cvWaitKey(1)) >= 0 ) {
+    if ((key = cv::waitKey(1)) >= 0)
+    {
 #else
-        if( (key = cv::waitKey(20)) >= 0 ) {
+    if ((key = cv::waitKey(20)) >= 0)
+    {
 #endif
-            if (keycallback) {
-                key = keycallback(key);
-            }
-            // If 'keycallback' didn't handle key - Use default handling
-            // By default 'C' for calibration
-            if (key == 'C') {
-                if (cap) {
-                    cap->showSettingsDialog();
-                }
-            }
-            // By default '0'-'9' toggles visible images
-            else if ((key >= '0') && (key <= '9')) {
-                int index=key-'0';
-                ToggleImageVisible(index);
-            }
-            else if(key == 'p') {
-                pause = !pause;
-            }
-            else if (key > 0) {
-                running=false;
-            }
-        }
-    }
-}
+      if (keycallback)
+      {
+        key = keycallback(key);
+      }
+      // If 'keycallback' didn't handle key - Use default handling
+      // By default 'C' for calibration
+      if (key == 'C')
+      {
+        if (cap)
+        {
+          cap->showSettingsDialog();
+        }
+      }
+      // By default '0'-'9' toggles visible images
+      else if ((key >= '0') && (key <= '9'))
+      {
+        int index = key - '0';
+        ToggleImageVisible(index);
+      }
+      else if (key == 'p')
+      {
+        pause = !pause;
+      }
+      else if (key > 0)
+      {
+        running = false;
+      }
+    }
+  }
+}
 
-void CvTestbed::ShowVisibleImages() {
-    for (size_t i=0; i<images.size(); i++) {
-        if (images[i].visible) {
-            cv::imshow(images[i].title.c_str(), images[i].ipl);
-        }
-    }
-}
+void CvTestbed::ShowVisibleImages()
+{
+  for (size_t i = 0; i < images.size(); i++)
+  {
+    if (images[i].visible)
+    {
+      cv::imshow(images[i].title, images[i].ipl);
+    }
+  }
+}
 
-CvTestbed& CvTestbed::Instance() {
-    static CvTestbed obj;
-    return obj;
-}
+CvTestbed& CvTestbed::Instance()
+{
+  static CvTestbed obj;
+  return obj;
+}
 
-void CvTestbed::SetVideoCallback(void (*_videocallback)(cv::Mat *image)) {
-    videocallback=_videocallback;
-}
+void CvTestbed::SetVideoCallback(void (*_videocallback)(cv::Mat& image))
+{
+  videocallback = _videocallback;
+}
 
-void CvTestbed::SetKeyCallback(int (*_keycallback)(int key)) {
-    keycallback=_keycallback;
-}
+void CvTestbed::SetKeyCallback(int (*_keycallback)(int key))
+{
+  keycallback = _keycallback;
+}
 
-bool CvTestbed::StartVideo(Capture *_cap, const char *_wintitle) {
-    bool clean=false;
-    cap=_cap;
-    if (cap == NULL) {
-        CaptureFactory::CaptureDeviceVector vec = CaptureFactory::instance()->enumerateDevices();
-        if (vec.size() < 1) return false;
-        cap = CaptureFactory::instance()->createCapture(vec[0]);
-        if (!cap->start()) {
-            delete cap;
-            return false;
-        }
-        clean=true;
-    }
-    if (_wintitle) {
-        wintitle = _wintitle;
-        cv::namedWindow(_wintitle, 1);
-    }
-    WaitKeys(); // Call the main loop
-    if (clean) {
-        cap->stop();
-        delete cap;
-    }
-    return true;
-}
+bool CvTestbed::StartVideo(Capture* _cap, const char* _wintitle)
+{
+  bool clean = false;
+  cap = _cap;
+  if (cap == NULL)
+  {
+    CaptureFactory::CaptureDeviceVector vec =
+        CaptureFactory::instance()->enumerateDevices();
+    if (vec.size() < 1)
+      return false;
+    cap = CaptureFactory::instance()->createCapture(vec[0]);
+    if (!cap->start())
+    {
+      delete cap;
+      return false;
+    }
+    clean = true;
+  }
+  if (_wintitle)
+  {
+    wintitle = _wintitle;
+    cv::namedWindow(_wintitle, 1);
+  }
+  WaitKeys();  // Call the main loop
+  if (clean)
+  {
+    cap->stop();
+    delete cap;
+  }
+  return true;
+}
 
-size_t CvTestbed::SetImage(const char *title, cv::Mat *ipl, bool release_at_exit /* =false */) {
-    size_t index = GetImageIndex(title);
-    if (index == -1) {
-        // If the title wasn't found create new
-        Image i(ipl, title, false, release_at_exit);
-        images.push_back(i);
-        return (images.size()-1);
-    }
-    // // If the title was found replace the image
-    // if (images[index].release_at_exit) {
-    //     cvReleaseImage(&(images[index]));
-    // }
-    images[index].ipl = ipl;
-    // images[index].release_at_exit = release_at_exit;
-    return index;
-}
+size_t CvTestbed::SetImage(const char* title, const cv::Mat& ipl,
+                           bool release_at_exit /* =false */)
+{
+  size_t index = GetImageIndex(title);
+  if (index == -1)
+  {
+    // If the title wasn't found create new
+    Image i(ipl, title, false, release_at_exit);
+    images.push_back(i);
+    return (images.size() - 1);
+  }
+  // If the title was found replace the image
+  if (images[index].release_at_exit)
+  {
+    images[index].ipl.release();
+  }
+  images[index].ipl = ipl;
+  images[index].release_at_exit = release_at_exit;
+  return index;
+}
 
-cv::Mat *CvTestbed::CreateImage(const char *title, cv::Size size, int type) {
-    cv::Mat temp = cv::Mat::zeros(size,type);
-    cv::Mat *ipl = &temp;
-    if (!ipl) return NULL;
-    SetImage(title, ipl, true);
-    return ipl;
-}
+cv::Mat CvTestbed::CreateImage(const char* title, cv::Size size, int depth,
+                               int channels)
+{
+  cv::Mat ipl = cv::Mat(size, CV_MAKETYPE(depth, channels));
+  SetImage(title, ipl, true);
+  return ipl;
+}
 
-cv::Mat *CvTestbed::CreateImageWithProto(const char *title, cv::Mat *proto, int depth /* =0 */, int channels /* =0 */) {
-    cv::Mat temp = cv::Mat::zeros(cv::Size(proto->cols, proto->rows), proto->type());
-    cv::Mat *ipl= &temp;
-    if (!ipl) return NULL;
-    //ipl->origin = proto->origin;
-    SetImage(title, ipl, true);
-    return ipl;
-}
+cv::Mat CvTestbed::CreateImageWithProto(const char* title, cv::Mat& proto,
+                                        int depth /* =0 */,
+                                        int channels /* =0 */)
+{
+  if (depth == 0)
+    depth = proto.depth();
+  if (channels == 0)
+    channels = proto.channels();
+  cv::Mat ipl =
+      cv::Mat(cv::Size(proto.cols, proto.rows), CV_MAKETYPE(depth, channels));
+  SetImage(title, ipl, true);
+  return ipl;
+}
 
-cv::Mat *CvTestbed::GetImage(size_t index) {
-    if (index < 0) return NULL;
-    if (index >= images.size()) return NULL;
-    return images[index].ipl;
-}
+cv::Mat CvTestbed::GetImage(size_t index)
+{
+  if (index < 0)
+    return cv::Mat();
+  if (index >= images.size())
+    return cv::Mat();
+  return images[index].ipl;
+}
 
-size_t CvTestbed::GetImageIndex(const char *title) {
-    std::string s(title);
-    for (size_t i=0; i<images.size(); i++) {
-        if (s.compare(images[i].title) == 0) {
-            return i;
-        }
-    }
-    return (size_t)-1;
-}
+size_t CvTestbed::GetImageIndex(const char* title)
+{
+  std::string s(title);
+  for (size_t i = 0; i < images.size(); i++)
+  {
+    if (s.compare(images[i].title) == 0)
+    {
+      return i;
+    }
+  }
+  return (size_t)-1;
+}
 
-cv::Mat *CvTestbed::GetImage(const char *title) {
-    return GetImage(GetImageIndex(title));
-}
+cv::Mat CvTestbed::GetImage(const char* title)
+{
+  return GetImage(GetImageIndex(title));
+}
 
-bool CvTestbed::ToggleImageVisible(size_t index, int flags) {
-    if (index >= images.size()) return false;
-    if (images[index].visible == false) {
-        images[index].visible=true;
-        cv::namedWindow(images[index].title.c_str(), flags);
-        return true;
-    }
-    else {
-        images[index].visible=false;
-        cv::destroyWindow(images[index].title.c_str());
-        return false;
-    }
+bool CvTestbed::ToggleImageVisible(size_t index, int flags)
+{
+  if (index >= images.size())
+    return false;
+  if (images[index].visible == false)
+  {
+    images[index].visible = true;
+    cv::namedWindow(images[index].title, flags);
+    return true;
+  }
+  else
+  {
+    images[index].visible = false;
+    cv::destroyWindow(images[index].title);
+    return false;
+  }
 }
diff --git a/ar_track_alvar/src/DirectoryIterator.cpp b/ar_track_alvar/src/DirectoryIterator.cpp
old mode 100755
new mode 100644
index d425537..766891b
--- a/ar_track_alvar/src/DirectoryIterator.cpp
+++ b/ar_track_alvar/src/DirectoryIterator.cpp
@@ -25,36 +25,36 @@
 
 #include "ar_track_alvar/DirectoryIterator_private.h"
 
-namespace alvar {
-
-DirectoryIterator::DirectoryIterator(const std::string &path)
-    : d(new DirectoryIteratorPrivate(path))
+namespace alvar
+{
+DirectoryIterator::DirectoryIterator(const std::string& path)
+  : d(new DirectoryIteratorPrivate(path))
 {
 }
 
 DirectoryIterator::~DirectoryIterator()
 {
-    delete d;
+  delete d;
 }
 
 bool DirectoryIterator::hasNext()
 {
-    return d->hasNext();
+  return d->hasNext();
 }
 
 std::string DirectoryIterator::next()
 {
-    return d->next();
+  return d->next();
 }
 
 std::string DirectoryIterator::currentEntry()
 {
-    return d->mEntry;
+  return d->mEntry;
 }
 
 std::string DirectoryIterator::currentPath()
 {
-    return d->mDirectory + d->mEntry;
+  return d->mDirectory + d->mEntry;
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/DirectoryIterator_unix.cpp b/ar_track_alvar/src/DirectoryIterator_unix.cpp
old mode 100755
new mode 100644
index 68bfa20..bf6534f
--- a/ar_track_alvar/src/DirectoryIterator_unix.cpp
+++ b/ar_track_alvar/src/DirectoryIterator_unix.cpp
@@ -25,88 +25,97 @@
 
 #include <dirent.h>
 
-namespace alvar {
-
+namespace alvar
+{
 class DirectoryIteratorPrivateData
 {
 public:
-    DirectoryIteratorPrivateData()
-        : mHandle(NULL)
-        , mData(NULL)
-    {
-    }
-
-    DIR *mHandle;
-    dirent *mData;
+  DirectoryIteratorPrivateData() : mHandle(NULL), mData(NULL)
+  {
+  }
+
+  DIR* mHandle;
+  dirent* mData;
 };
 
-DirectoryIteratorPrivate::DirectoryIteratorPrivate(const std::string &path)
-    : d(new DirectoryIteratorPrivateData())
-    , mDirectory(path)
-    , mEntry()
-    , mValid(false)
+DirectoryIteratorPrivate::DirectoryIteratorPrivate(const std::string& path)
+  : d(new DirectoryIteratorPrivateData())
+  , mDirectory(path)
+  , mEntry()
+  , mValid(false)
 {
-    if (mDirectory.at(mDirectory.length() - 1) != '/') {
-        mDirectory.append("/");
-    }
+  if (mDirectory.at(mDirectory.length() - 1) != '/')
+  {
+    mDirectory.append("/");
+  }
 }
 
 DirectoryIteratorPrivate::~DirectoryIteratorPrivate()
 {
-    closedir(d->mHandle);
-    delete d;
+  closedir(d->mHandle);
+  delete d;
 }
 
 bool DirectoryIteratorPrivate::hasNext()
 {
-    if (d->mHandle == NULL) {
-        d->mHandle = opendir(mDirectory.data());
-
-        if (d->mHandle != NULL) {
-            d->mData = readdir(d->mHandle);
-
-            if (d->mData != NULL) {
-                mValid = true;
-                skip();
-            }
-        }
+  if (d->mHandle == NULL)
+  {
+    d->mHandle = opendir(mDirectory.data());
+
+    if (d->mHandle != NULL)
+    {
+      d->mData = readdir(d->mHandle);
+
+      if (d->mData != NULL)
+      {
+        mValid = true;
+        skip();
+      }
     }
+  }
 
-    return mValid;
+  return mValid;
 }
 
 std::string DirectoryIteratorPrivate::next()
 {
-    if (!hasNext()) {
-        return "";
-    }
-
-    mEntry = std::string(d->mData->d_name);
-
-    d->mData = readdir(d->mHandle);
-    if (d->mData == NULL) {
-        mValid = false;
-    }
-    else {
-        skip();
-    }
-
-    return mEntry;
+  if (!hasNext())
+  {
+    return "";
+  }
+
+  mEntry = std::string(d->mData->d_name);
+
+  d->mData = readdir(d->mHandle);
+  if (d->mData == NULL)
+  {
+    mValid = false;
+  }
+  else
+  {
+    skip();
+  }
+
+  return mEntry;
 }
 
 void DirectoryIteratorPrivate::skip()
 {
-    while (true) {
-        if (std::string(d->mData->d_name) != "." && std::string(d->mData->d_name) != "..") {
-            return;
-        }
-
-        d->mData = readdir(d->mHandle);
-        if (d->mData == NULL) {
-            mValid = false;
-            return;
-        }
+  while (true)
+  {
+    if (std::string(d->mData->d_name) != "." &&
+        std::string(d->mData->d_name) != "..")
+    {
+      return;
+    }
+
+    d->mData = readdir(d->mHandle);
+    if (d->mData == NULL)
+    {
+      mValid = false;
+      return;
    }
+  }
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/DirectoryIterator_win.cpp b/ar_track_alvar/src/DirectoryIterator_win.cpp
old mode 100755
new mode 100644
index 09d7a45..74accb7
--- a/ar_track_alvar/src/DirectoryIterator_win.cpp
+++ b/ar_track_alvar/src/DirectoryIterator_win.cpp
@@ -25,83 +25,91 @@
 
 #include <windows.h>
 
-namespace alvar {
-
+namespace alvar
+{
 class DirectoryIteratorPrivateData
 {
public:
-    DirectoryIteratorPrivateData()
-        : mHandle(INVALID_HANDLE_VALUE)
-        , mData()
-    {
-    }
-
-    HANDLE mHandle;
-    WIN32_FIND_DATA mData;
+  DirectoryIteratorPrivateData() : mHandle(INVALID_HANDLE_VALUE), mData()
+  {
+  }
+
+  HANDLE mHandle;
+  WIN32_FIND_DATA mData;
 };
 
-DirectoryIteratorPrivate::DirectoryIteratorPrivate(const std::string &path)
-    : d(new DirectoryIteratorPrivateData())
-    , mDirectory(path)
-    , mEntry()
-    , mValid(false)
+DirectoryIteratorPrivate::DirectoryIteratorPrivate(const std::string& path)
+  : d(new DirectoryIteratorPrivateData())
+  , mDirectory(path)
+  , mEntry()
+  , mValid(false)
 {
-    if (mDirectory.at(mDirectory.length() - 1) != '\\') {
-        mDirectory.append("\\");
-    }
+  if (mDirectory.at(mDirectory.length() - 1) != '\\')
+  {
+    mDirectory.append("\\");
+  }
 }
 
 DirectoryIteratorPrivate::~DirectoryIteratorPrivate()
 {
-    FindClose(d->mHandle);
-    delete d;
+  FindClose(d->mHandle);
+  delete d;
 }
 
 bool DirectoryIteratorPrivate::hasNext()
 {
-    if (d->mHandle == INVALID_HANDLE_VALUE) {
-        std::string search = mDirectory + "*";
-        d->mHandle = FindFirstFile(search.data(), &d->mData);
-
-        if (d->mHandle != INVALID_HANDLE_VALUE) {
-            mValid = true;
-            skip();
-        }
+  if (d->mHandle == INVALID_HANDLE_VALUE)
+  {
+    std::string search = mDirectory + "*";
+    d->mHandle = FindFirstFile(search.data(), &d->mData);
+
+    if (d->mHandle != INVALID_HANDLE_VALUE)
+    {
+      mValid = true;
+      skip();
     }
+  }
 
-    return mValid;
+  return mValid;
 }
 
 std::string DirectoryIteratorPrivate::next()
 {
-    if (!hasNext()) {
-        return "";
-    }
-
-    mEntry = std::string(d->mData.cFileName);
-
-    if (!FindNextFile(d->mHandle, &d->mData)) {
-        mValid = false;
-    }
-    else {
-        skip();
-    }
-
-    return mEntry;
+  if (!hasNext())
+  {
+    return "";
+  }
+
+  mEntry = std::string(d->mData.cFileName);
+
+  if (!FindNextFile(d->mHandle, &d->mData))
+  {
+    mValid = false;
+  }
+  else
+  {
+    skip();
+  }
+
+  return mEntry;
 }
 
 void DirectoryIteratorPrivate::skip()
 {
-    while (true) {
-        if (std::string(d->mData.cFileName) != "." && std::string(d->mData.cFileName) != "..") {
-            return;
-        }
-
-        if (!FindNextFile(d->mHandle, &d->mData)) {
-            mValid = false;
-            return;
-        }
+  while (true)
+  {
+    if (std::string(d->mData.cFileName) != "." &&
+        std::string(d->mData.cFileName) != "..")
+    {
+      return;
+    }
+
+    if (!FindNextFile(d->mHandle, &d->mData))
+    {
+      mValid = false;
+      return;
    }
+  }
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Draw.cpp b/ar_track_alvar/src/Draw.cpp
index 515c770..5186756 100644
--- a/ar_track_alvar/src/Draw.cpp
+++ b/ar_track_alvar/src/Draw.cpp
@@ -170,12 +170,7 @@ void BuildHideTexture(cv::Mat& image, cv::Mat& hide_texture, Camera* cam,
   cv::Mat points3d_mat, points2d_mat;
   points3d_mat = cv::Mat(4, 3, CV_64F, points3d);
   points2d_mat = cv::Mat(4, 2, CV_64F, points2d);
-
-
-  CvMat temp = cvMat(points3d_mat);
-  CvMat temp2 = cvMat(points2d_mat);
-
-  cam->ProjectPoints(&temp, gl_modelview, &temp2);
+  cam->ProjectPoints(points3d_mat, gl_modelview, points2d_mat);
   int kuvanx4 = (int)Limit(points2d[0][0], 0, image.cols - 1);
   int kuvany4 = (int)Limit(points2d[0][1], 0, image.rows - 1);
   int kuvanx5 = (int)Limit(points2d[1][0], 0, image.cols - 1);
@@ -247,13 +242,7 @@ void DrawTexture(cv::Mat& image, cv::Mat& texture, Camera* cam,
   cv::Mat points3d_mat, points2d_mat;
   points3d_mat = cv::Mat(4, 3, CV_64F, points3d);
   points2d_mat = cv::Mat(4, 2, CV_64F, points2d);
-
-  CvMat temp = cvMat(points3d_mat);
-  CvMat temp2 = cvMat(points2d_mat);
-
-  cam->ProjectPoints(&temp, gl_modelview, &temp2);
-
-  //cam->ProjectPoints(points3d_mat, gl_modelview, points2d_mat);
+  cam->ProjectPoints(points3d_mat, gl_modelview, points2d_mat);
 
   // Warp texture and mask using the perspective that is based on the corners
   double map[9];
@@ -299,4 +288,4 @@ void DrawTexture(cv::Mat& image, cv::Mat& texture, Camera* cam,
   mask2.release();
 }
 
-}  // namespace alvar
\ No newline at end of file
+}  // namespace alvar
diff --git a/ar_track_alvar/src/EC.cpp b/ar_track_alvar/src/EC.cpp
index 946d8da..e33a814 100644
--- a/ar_track_alvar/src/EC.cpp
+++ b/ar_track_alvar/src/EC.cpp
@@ -24,258 +24,287 @@
 #include "EC.h"
 #include "Optimization.h"
 
-namespace alvar {
-
+namespace alvar
+{
 struct ProjectParams
 {
-    Camera *camera;
-    const CvMat *object_model;
+  Camera* camera;
+  const CvMat* object_model;
 };
 
-static void ProjectRot(CvMat* state, CvMat* projection, void* x)
+static void ProjectRot(CvMat* state, CvMat* projection, void* x)
 {
-    ProjectParams *foo = (ProjectParams*)x;
-    Camera *camera = foo->camera;
-    const CvMat *object_model = foo->object_model;
-    int count = projection->rows;
-    CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+0]));
-    double zeros[3] = {0};
-    CvMat zero_tra = cvMat(3, 1, CV_64F, zeros);
-    cvReshape(projection, projection, 2, 1);
-    cvProjectPoints2(object_model, &rot_mat, &zero_tra, &(camera->calib_K), &(camera->calib_D), projection);
-    cvReshape(projection, projection, 1, count);
+  ProjectParams* foo = (ProjectParams*)x;
+  Camera* camera = foo->camera;
+  const CvMat* object_model = foo->object_model;
+  int count = projection->rows;
+  CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0 + 0]));
+  double zeros[3] = { 0 };
+  CvMat zero_tra = cvMat(3, 1, CV_64F, zeros);
+  cvReshape(projection, projection, 2, 1);
+  cvProjectPoints2(object_model, &rot_mat, &zero_tra, &(camera->calib_K),
+                   &(camera->calib_D), projection);
+  cvReshape(projection, projection, 1, count);
 }
 
 // TODO: How this differs from the Camera::ProjectPoints ???
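+// Unlike Camera::ProjectPoints, which projects through a fixed pose, these
+// helpers read the pose out of the optimizer's state vector (Rodrigues
+// rotation in rows 0-2, translation in rows 3-5), so the optimizer can
+// re-project after every parameter update; ProjectRot above does the same
+// with the translation pinned to zero.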
 static void Project(CvMat* state, CvMat* projection, void* x)
 {
-    ProjectParams *foo = (ProjectParams*)x;
-    Camera *camera = foo->camera;
-    const CvMat *object_model = foo->object_model;
-    int count = projection->rows;
-    CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+0]));
-    CvMat tra_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+3]));
-    cvReshape(projection, projection, 2, 1);
-    cvProjectPoints2(object_model, &rot_mat, &tra_mat, &(camera->calib_K), &(camera->calib_D), projection);
-    cvReshape(projection, projection, 1, count);
+  ProjectParams* foo = (ProjectParams*)x;
+  Camera* camera = foo->camera;
+  const CvMat* object_model = foo->object_model;
+  int count = projection->rows;
+  CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0 + 0]));
+  CvMat tra_mat = cvMat(3, 1, CV_64F, &(state->data.db[0 + 3]));
+  cvReshape(projection, projection, 2, 1);
+  cvProjectPoints2(object_model, &rot_mat, &tra_mat, &(camera->calib_K),
+                   &(camera->calib_D), projection);
+  cvReshape(projection, projection, 1, count);
 }
 
-bool CameraEC::UpdatePose(const CvMat* object_points, CvMat* image_points, Pose *pose, CvMat *weights) {
-    double rot[3]; CvMat rotm = cvMat(3, 1, CV_64F, rot);
-    double tra[3]; CvMat tram = cvMat(3, 1, CV_64F, tra);
-    pose->GetRodriques(&rotm);
-    pose->GetTranslation(&tram);
-    bool ret = UpdatePose(object_points, image_points, &rotm, &tram, weights);
-    pose->SetRodriques(&rotm);
-    pose->SetTranslation(&tram);
-    return ret;
+bool CameraEC::UpdatePose(const CvMat* object_points, CvMat* image_points,
+                          Pose* pose, CvMat* weights)
+{
+  double rot[3];
+  CvMat rotm = cvMat(3, 1, CV_64F, rot);
+  double tra[3];
+  CvMat tram = cvMat(3, 1, CV_64F, tra);
+  pose->GetRodriques(&rotm);
+  pose->GetTranslation(&tram);
+  bool ret = UpdatePose(object_points, image_points, &rotm, &tram, weights);
+  pose->SetRodriques(&rotm);
+  pose->SetTranslation(&tram);
+  return ret;
 }
 
-bool CameraEC::UpdatePose(const CvMat* object_points, CvMat* image_points, CvMat *rot, CvMat *tra, CvMat *weights) {
-    if (object_points->height < 4) return false;
-    /*	if (object_points->height < 6) {
-        return false;
-        // TODO: We need to change image_points into CV_32FC2
-        return Camera::CalcExteriorOrientation(object_points, image_points, rot, tra);
-    }*/
-    CvMat* par = cvCreateMat(6, 1, CV_64F);
-    memcpy(&(par->data.db[0+0]), rot->data.db, 3*sizeof(double));
-    memcpy(&(par->data.db[0+3]), tra->data.db, 3*sizeof(double));
-
-    ProjectParams pparams;
-    pparams.camera = this;
-    pparams.object_model = object_points;
-
-    alvar::Optimization *opt = new alvar::Optimization(6, image_points->height);
-    double tmp = opt->Optimize(par, image_points, 0.0005, 2, Project, &pparams, alvar::Optimization::TUKEY_LM, 0, 0, weights);
-
-    memcpy(rot->data.db, &(par->data.db[0+0]), 3*sizeof(double));
-    memcpy(tra->data.db, &(par->data.db[0+3]), 3*sizeof(double));
-
-    delete opt;
-
-    cvReleaseMat(&par);
-    return true;
+bool CameraEC::UpdatePose(const CvMat* object_points, CvMat* image_points,
+                          CvMat* rot, CvMat* tra, CvMat* weights)
+{
+  if (object_points->height < 4)
+    return false;
+  /*	if (object_points->height < 6) {
+      return false;
+      // TODO: We need to change image_points into CV_32FC2
+      return Camera::CalcExteriorOrientation(object_points, image_points, rot,
+                                             tra);
+  }*/
+  CvMat* par = cvCreateMat(6, 1, CV_64F);
+  memcpy(&(par->data.db[0 + 0]), rot->data.db, 3 * sizeof(double));
+  memcpy(&(par->data.db[0 + 3]), tra->data.db, 3 * sizeof(double));
+
+  ProjectParams pparams;
+  pparams.camera = this;
+  pparams.object_model = object_points;
+
+  alvar::Optimization* opt = new alvar::Optimization(6, image_points->height);
+  double tmp = opt->Optimize(par, image_points, 0.0005, 2, Project, &pparams,
+                             alvar::Optimization::TUKEY_LM, 0, 0, weights);
+
+  memcpy(rot->data.db, &(par->data.db[0 + 0]), 3 * sizeof(double));
+  memcpy(tra->data.db, &(par->data.db[0 + 3]), 3 * sizeof(double));
+
+  delete opt;
+
+  cvReleaseMat(&par);
+  return true;
 }
 
-bool CameraEC::UpdateRotation(const CvMat* object_points, CvMat* image_points, Pose *pose)
+bool CameraEC::UpdateRotation(const CvMat* object_points, CvMat* image_points,
+                              Pose* pose)
 {
-    double rot[3]; CvMat rotm = cvMat(3, 1, CV_64F, rot);
-    double tra[3]; CvMat tram = cvMat(3, 1, CV_64F, tra);
-    pose->GetRodriques(&rotm);
-    pose->GetTranslation(&tram);
-    bool ret = UpdateRotation(object_points, image_points, &rotm, &tram);
-    pose->SetRodriques(&rotm);
-    pose->SetTranslation(&tram);
-    return ret;
+  double rot[3];
+  CvMat rotm = cvMat(3, 1, CV_64F, rot);
+  double tra[3];
+  CvMat tram = cvMat(3, 1, CV_64F, tra);
+  pose->GetRodriques(&rotm);
+  pose->GetTranslation(&tram);
+  bool ret = UpdateRotation(object_points, image_points, &rotm, &tram);
+  pose->SetRodriques(&rotm);
+  pose->SetTranslation(&tram);
+  return ret;
 }
 
-bool CameraEC::UpdateRotation(const CvMat* object_points, CvMat* image_points, CvMat *rot, CvMat *tra) {
-
-    CvMat* par = cvCreateMat(3, 1, CV_64F);
-    memcpy(&(par->data.db[0+0]), rot->data.db, 3*sizeof(double));
-    ProjectParams pparams;
-    pparams.camera = this;
-    pparams.object_model = object_points;
-    alvar::Optimization *opt = new alvar::Optimization(3, image_points->height);
-    double tmp = opt->Optimize(par, image_points, 0.0005, 2, ProjectRot, &pparams, alvar::Optimization::TUKEY_LM);
-    memcpy(rot->data.db, &(par->data.db[0+0]), 3*sizeof(double));
-    delete opt;
-    cvReleaseMat(&par);
-    return true;
+bool CameraEC::UpdateRotation(const CvMat* object_points, CvMat* image_points,
+                              CvMat* rot, CvMat* tra)
+{
+  CvMat* par = cvCreateMat(3, 1, CV_64F);
+  memcpy(&(par->data.db[0 + 0]), rot->data.db, 3 * sizeof(double));
+  ProjectParams pparams;
+  pparams.camera = this;
+  pparams.object_model = object_points;
+  alvar::Optimization* opt = new alvar::Optimization(3, image_points->height);
+  double tmp = opt->Optimize(par, image_points, 0.0005, 2, ProjectRot, &pparams,
+                             alvar::Optimization::TUKEY_LM);
+  memcpy(rot->data.db, &(par->data.db[0 + 0]), 3 * sizeof(double));
+  delete opt;
+  cvReleaseMat(&par);
+  return true;
 }
 
 // Assume that the mirror issue is in order
 void GetOrigo(Pose* pose, CvMat* O)
 {
-    pose->GetTranslation(O);
+  pose->GetTranslation(O);
 }
 
-void GetPointOnLine(const Pose* pose, Camera *camera, const CvPoint2D32f *u, CvMat* P)
+void GetPointOnLine(const Pose* pose, Camera* camera, const CvPoint2D32f* u,
+                    CvMat* P)
 {
-    double kid[9], rotd[9], trad[3], ud[3] = {u->x, u->y, 1};
-    CvMat Ki = cvMat(3, 3, CV_64F, kid);
-    CvMat R = cvMat(3, 3, CV_64F, rotd);
-    CvMat T = cvMat(3, 1, CV_64F, trad);
-    CvMat U = cvMat(3, 1, CV_64F, ud);
-    pose->GetMatrix(&R);
-    pose->GetTranslation(&T);
-    cvInv(&(camera->calib_K), &Ki);
-    cvMatMul(&R, &Ki, &Ki);
-    cvGEMM(&Ki, &U, 1, &T, 1, P, 0);
+  double kid[9], rotd[9], trad[3], ud[3] = { u->x, u->y, 1 };
+  CvMat Ki = cvMat(3, 3, CV_64F, kid);
+  CvMat R = cvMat(3, 3, CV_64F, rotd);
+  CvMat T = cvMat(3, 1, CV_64F, trad);
+  CvMat U = cvMat(3, 1, CV_64F, ud);
+  pose->GetMatrix(&R);
+  pose->GetTranslation(&T);
+  cvInv(&(camera->calib_K), &Ki);
+  cvMatMul(&R, &Ki, &Ki);
+  cvGEMM(&Ki, &U, 1, &T, 1, P, 0);
 }
 
-bool MidPointAlgorithm(CvMat* o1, CvMat* o2, CvMat* p1, CvMat* p2, CvPoint3D32f& X, double limit)
+bool MidPointAlgorithm(CvMat* o1, CvMat* o2, CvMat* p1, CvMat* p2,
+                       CvPoint3D32f& X, double limit)
 {
-    double ud[3], vd[3], wd[3];
-    CvMat u = cvMat(3, 1, CV_64F, ud);
-    CvMat v = cvMat(3, 1, CV_64F, vd);
-    CvMat w = cvMat(3, 1, CV_64F, wd);
-
-    cvSub(p1, o1, &u);
-    cvSub(p2, o2, &v);
-    cvSub(o1, o2, &w);
-
-    double a = cvDotProduct(&u, &u);
-    double b = cvDotProduct(&u, &v);
-    double c = cvDotProduct(&v, &v);
-    double d = cvDotProduct(&u, &w);
-    double e = cvDotProduct(&v, &w);
-    double D = a*c - b*b;
-    double sc, tc;
-
-    // compute the line parameters of the two closest points
-    if (D < limit)
-    {
-        return false;
-        // the lines are almost parallel
-        sc = 0.0;
-        tc = (b>c ? d/b : e/c);    // use the largest denominator
-    }
-    else
-    {
-        sc = (b*e - c*d) / D;
-        tc = (a*e - b*d) / D;
-    }
-
-    double m1d[3], m2d[3];
-    CvMat m1 = cvMat(3, 1, CV_64F, m1d);
-    CvMat m2 = cvMat(3, 1, CV_64F, m2d);
-    cvAddWeighted(&u, sc, o1, 1.0, 0.0, &m1);
-    cvAddWeighted(&v, tc, o2, 1.0, 0.0, &m2);
-    cvAddWeighted(&m1, 0.5, &m2, 0.5, 0.0, &m1);
-
-    X.x = (float)m1d[0];
-    X.y = (float)m1d[1];
-    X.z = (float)m1d[2];
-
-    return true;
+  double ud[3], vd[3], wd[3];
+  CvMat u = cvMat(3, 1, CV_64F, ud);
+  CvMat v = cvMat(3, 1, CV_64F, vd);
+  CvMat w = cvMat(3, 1, CV_64F, wd);
+
+  cvSub(p1, o1, &u);
+  cvSub(p2, o2, &v);
+  cvSub(o1, o2, &w);
+
+  double a = cvDotProduct(&u, &u);
+  double b = cvDotProduct(&u, &v);
+  double c = cvDotProduct(&v, &v);
+  double d = cvDotProduct(&u, &w);
+  double e = cvDotProduct(&v, &w);
+  double D = a * c - b * b;
+  double sc, tc;
+
+  // compute the line parameters of the two closest points
+  if (D < limit)
+  {
+    return false;
+    // the lines are almost parallel
+    sc = 0.0;
+    tc = (b > c ? d / b : e / c);  // use the largest denominator
+  }
+  else
+  {
+    sc = (b * e - c * d) / D;
+    tc = (a * e - b * d) / D;
+  }
+
+  double m1d[3], m2d[3];
+  CvMat m1 = cvMat(3, 1, CV_64F, m1d);
+  CvMat m2 = cvMat(3, 1, CV_64F, m2d);
+  cvAddWeighted(&u, sc, o1, 1.0, 0.0, &m1);
+  cvAddWeighted(&v, tc, o2, 1.0, 0.0, &m2);
+  cvAddWeighted(&m1, 0.5, &m2, 0.5, 0.0, &m1);
+
+  X.x = (float)m1d[0];
+  X.y = (float)m1d[1];
+  X.z = (float)m1d[2];
+
+  return true;
 }
 
 // todo
-bool CameraEC::ReconstructFeature(const Pose *pose1, const Pose *pose2, const CvPoint2D32f *u1, const CvPoint2D32f *u2, CvPoint3D32f *p3d, double limit) {
-    double o1d[3], o2d[3], p1d[3], p2d[3];
-    CvMat o1 = cvMat(3, 1, CV_64F, o1d);
-    CvMat o2 = cvMat(3, 1, CV_64F, o2d);
-    CvMat p1 = cvMat(3, 1, CV_64F, p1d);
-    CvMat p2 = cvMat(3, 1, CV_64F, p2d);
-
-    Pose po1 = *pose1; // Make copy so that we don't destroy the pose content
-    Pose po2 = *pose2;
-    po1.Invert();
-    po2.Invert();
-    GetOrigo(&po1, &o1);
-    GetOrigo(&po2, &o2);
-    GetPointOnLine(&po1, this, u1, &p1);
-    GetPointOnLine(&po2, this, u2, &p2);
-
-    return MidPointAlgorithm(&o1, &o2, &p1, &p2, *p3d, limit);
-}
+bool CameraEC::ReconstructFeature(const Pose* pose1, const Pose* pose2,
+                                  const CvPoint2D32f* u1,
+                                  const CvPoint2D32f* u2, CvPoint3D32f* p3d,
+                                  double limit)
+{
+  double o1d[3], o2d[3], p1d[3], p2d[3];
+  CvMat o1 = cvMat(3, 1, CV_64F, o1d);
+  CvMat o2 = cvMat(3, 1, CV_64F, o2d);
+  CvMat p1 = cvMat(3, 1, CV_64F, p1d);
+  CvMat p2 = cvMat(3, 1, CV_64F, p2d);
+
+  Pose po1 = *pose1;  // Make copy so that we don't destroy the pose content
+  Pose po2 = *pose2;
+  po1.Invert();
+  po2.Invert();
+  GetOrigo(&po1, &o1);
+  GetOrigo(&po2, &o2);
+  GetPointOnLine(&po1, this, u1, &p1);
+  GetPointOnLine(&po2, this, u2, &p2);
+
+  return MidPointAlgorithm(&o1, &o2, &p1, &p2, *p3d, limit);
+}
 
-void CameraEC::Get3dOnPlane(const Pose *pose, CvPoint2D32f p2d, CvPoint3D32f &p3d) {
-    double pd[16], md[9], kd[9];
-    CvMat P = cvMat(4, 4, CV_64F, pd);
-    CvMat H = cvMat(3, 3, CV_64F, md);
-    CvMat Ki = cvMat(3, 3, CV_64F, kd);
-
-    pose->GetMatrix(&P);
-    cvInv(&(calib_K), &Ki);
-
-    // Construct homography from pose
-    int ind_s = 0, ind_c = 0;
-    for(int i = 0; i < 3; ++i)
-    {
-        CvRect r; r.x = ind_s; r.y = 0; r.height = 3; r.width = 1;
-        CvMat sub = cvMat(3, 1, CV_64F);
-        cvGetSubRect(&P, &sub, r);
-        CvMat col = cvMat(3, 1, CV_64F);
-        cvGetCol(&H, &col, ind_c);
-        cvCopy(&sub, &col);
-        ind_c++;
-        ind_s++;
-        if(i == 1) ind_s++;
-    }
-
-    // Apply H to get the 3D coordinates
-    Camera::Undistort(p2d);
-    double xd[3] = {p2d.x, p2d.y, 1};
-    CvMat X = cvMat(3, 1, CV_64F, xd);
-    cvMatMul(&Ki, &X, &X);
-    cvInv(&H, &H);
-    cvMatMul(&H, &X, &X);
-
-    p3d.x = (float)(xd[0] / xd[2]);
-    p3d.y = (float)(xd[1] / xd[2]);
-    p3d.z = 0;
+void CameraEC::Get3dOnPlane(const Pose* pose, CvPoint2D32f p2d,
+                            CvPoint3D32f& p3d)
+{
+  double pd[16], md[9], kd[9];
+  CvMat P = cvMat(4, 4, CV_64F, pd);
+  CvMat H = cvMat(3, 3, CV_64F, md);
+  CvMat Ki = cvMat(3, 3, CV_64F, kd);
+
+  pose->GetMatrix(&P);
+  cvInv(&(calib_K), &Ki);
+
+  // Construct homography from pose
+  int ind_s = 0, ind_c = 0;
+  for (int i = 0; i < 3; ++i)
+  {
+    CvRect r;
+    r.x = ind_s;
+    r.y = 0;
+    r.height = 3;
+    r.width = 1;
+    CvMat sub = cvMat(3, 1, CV_64F);
+    cvGetSubRect(&P, &sub, r);
+    CvMat col = cvMat(3, 1, CV_64F);
+    cvGetCol(&H, &col, ind_c);
+    cvCopy(&sub, &col);
+    ind_c++;
+    ind_s++;
+    if (i == 1)
+      ind_s++;
+  }
+
+  // Apply H to get the 3D coordinates
+  Camera::Undistort(p2d);
+  double xd[3] = { p2d.x, p2d.y, 1 };
+  CvMat X = cvMat(3, 1, CV_64F, xd);
+  cvMatMul(&Ki, &X, &X);
+  cvInv(&H, &H);
+  cvMatMul(&H, &X, &X);
+
+  p3d.x = (float)(xd[0] / xd[2]);
+  p3d.y = (float)(xd[1] / xd[2]);
+  p3d.z = 0;
 }
 
-void CameraEC::Get3dOnDepth(const Pose *pose, CvPoint2D32f p2d, float depth, CvPoint3D32f &p3d)
-{
-    double wx, wy, wz;
-    Camera::Undistort(p2d);
-
-    // Tassa asetetaan z = inf ja lasketaan x ja y jotenkin?!?
-    //double object_scale = _dist_to_target; // TODO Same as the pose?!?!?!?
-
-    // inv(K)*[u v 1]'*scale
-    wx = depth*(p2d.x-calib_K_data[0][2])/calib_K_data[0][0];
-    wy = depth*(p2d.y-calib_K_data[1][2])/calib_K_data[1][1];
-    wz = depth;
-
-    // Now the points are in camera coordinate frame.
-    alvar::Pose p = *pose;
-    p.Invert();
-
-    double Xd[4] = {wx, wy, wz, 1};
-    CvMat Xdm = cvMat(4, 1, CV_64F, Xd);
-    double Pd[16];
-    CvMat Pdm = cvMat(4, 4, CV_64F, Pd);
-    p.GetMatrix(&Pdm);
-    cvMatMul(&Pdm, &Xdm, &Xdm);
-    p3d.x = float(Xd[0]/Xd[3]);
-    p3d.y = float(Xd[1]/Xd[3]);
-    p3d.z = float(Xd[2]/Xd[3]);
-}
-
-} // namespace alvar
+void CameraEC::Get3dOnDepth(const Pose* pose, CvPoint2D32f p2d, float depth,
+                            CvPoint3D32f& p3d)
+{
+  double wx, wy, wz;
+  Camera::Undistort(p2d);
+
+  // Here z = inf is set, and x and y are somehow computed from it?!?
+  // double object_scale = _dist_to_target; // TODO Same as the pose?!?!?!?
+
+  // inv(K)*[u v 1]'*scale
+  wx = depth * (p2d.x - calib_K_data[0][2]) / calib_K_data[0][0];
+  wy = depth * (p2d.y - calib_K_data[1][2]) / calib_K_data[1][1];
+  wz = depth;
+
+  // Now the points are in camera coordinate frame.
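+  // Map from the camera frame into the world frame: invert the pose and
+  // apply its 4x4 homogeneous matrix to the point [wx wy wz 1]'.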
+  alvar::Pose p = *pose;
+  p.Invert();
+
+  double Xd[4] = { wx, wy, wz, 1 };
+  CvMat Xdm = cvMat(4, 1, CV_64F, Xd);
+  double Pd[16];
+  CvMat Pdm = cvMat(4, 4, CV_64F, Pd);
+  p.GetMatrix(&Pdm);
+  cvMatMul(&Pdm, &Xdm, &Xdm);
+  p3d.x = float(Xd[0] / Xd[3]);
+  p3d.y = float(Xd[1] / Xd[3]);
+  p3d.z = float(Xd[2] / Xd[3]);
+}
+}  // namespace alvar
diff --git a/ar_track_alvar/src/FernImageDetector.cpp b/ar_track_alvar/src/FernImageDetector.cpp
index 3e6b183..dd61402 100644
--- a/ar_track_alvar/src/FernImageDetector.cpp
+++ b/ar_track_alvar/src/FernImageDetector.cpp
@@ -25,17 +25,16 @@
 namespace alvar
 {
+#define PATCH_SIZE 31
+#define PYR_LEVELS 1
+#define N_VIEWS 5000
+#define N_PTS_TO_FIND 400
+#define N_PTS_TO_TEACH 200
+#define SIZE_BLUR 13
 
-#define PATCH_SIZE      31
-#define PYR_LEVELS      1
-#define N_VIEWS         5000
-#define N_PTS_TO_FIND   400
-#define N_PTS_TO_TEACH  200
-#define SIZE_BLUR       13
-
-#define N_STRUCTS       50
-#define STRUCT_SIZE     11
-#define SIGNATURE_SIZE  400
+#define N_STRUCTS 50
+#define STRUCT_SIZE 11
+#define SIGNATURE_SIZE 400
 
 // default opencv parameters
 // PATCH_SIZE = 31,
 // PYR_LEVELS = 3,
 // N_VIEWS = 1000,
 // N_PTS_TO_FIND = 400,
 // N_PTS_TO_TEACH = 100,
 // SIZE_BLUR = 7,
 // N_STRUCTS = 50,
 // STRUCT_SIZE = 9,
 // SIGNATURE_SIZE = 176
 // PatchGenerator( double _backgroundMin, double _backgroundMax,
 //                 double _noiseRange, bool _randomBlur=true,
 //                 double _lambdaMin=0.6, double _lambdaMax=1.5,
 //                 double _thetaMin=-CV_PI, double _thetaMax=CV_PI,
 //                 double _phiMin=-CV_PI, double _phiMax=CV_PI );
 
 // Calculate random parameterized affine transformation A
-// A = T(patch center) * R(theta) * R(phi)' * S(lambda1, lambda2) * R(phi) * T(-pt)
+// A = T(patch center) * R(theta) * R(phi)' * S(lambda1, lambda2) * R(phi) *
+// T(-pt)
 
-FernClassifierWrapper::FernClassifierWrapper()
-    : FernClassifier()
+FernClassifierWrapper::FernClassifierWrapper() : FernClassifier()
 {
 }
 
-FernClassifierWrapper::FernClassifierWrapper(const FileNode &fileNode)
-    : FernClassifier(fileNode)
+FernClassifierWrapper::FernClassifierWrapper(const FileNode& fileNode)
+  : FernClassifier(fileNode)
 {
 }
 
-FernClassifierWrapper::FernClassifierWrapper(const vector<vector<Point2f> > &points,
-                                             const vector<Mat> &referenceImages,
-                                             const vector<vector<int> > &labels,
-                                             int _nclasses, int _patchSize,
-                                             int _signatureSize, int _nstructs,
-                                             int _structSize, int _nviews,
-                                             int _compressionMethod,
-                                             const PatchGenerator &patchGenerator)
-    : FernClassifier(points, referenceImages, labels, _nclasses, _patchSize, _signatureSize,
-                     _nstructs, _structSize, _nviews, _compressionMethod, patchGenerator)
+FernClassifierWrapper::FernClassifierWrapper(
+    const vector<vector<Point2f> >& points, const vector<Mat>& referenceImages,
+    const vector<vector<int> >& labels, int _nclasses, int _patchSize,
+    int _signatureSize, int _nstructs, int _structSize, int _nviews,
+    int _compressionMethod, const PatchGenerator& patchGenerator)
+  : FernClassifier(points, referenceImages, labels, _nclasses, _patchSize,
+                   _signatureSize, _nstructs, _structSize, _nviews,
+                   _compressionMethod, patchGenerator)
 {
 }
 
@@ -84,439 +81,490 @@ FernClassifierWrapper::~FernClassifierWrapper()
 {
 }
 
-void FernClassifierWrapper::readBinary(std::fstream &stream)
+void FernClassifierWrapper::readBinary(std::fstream& stream)
 {
-    clear();
-
-    stream.read((char *)&verbose, sizeof(verbose));
-    stream.read((char *)&nstructs, sizeof(nstructs));
-    stream.read((char *)&structSize, sizeof(structSize));
-    stream.read((char *)&nclasses, sizeof(nclasses));
-    stream.read((char *)&signatureSize, sizeof(signatureSize));
-    stream.read((char *)&compressionMethod, sizeof(compressionMethod));
-    stream.read((char *)&leavesPerStruct, sizeof(leavesPerStruct));
-    stream.read((char *)&patchSize.width, sizeof(patchSize.width));
-    stream.read((char *)&patchSize.height, sizeof(patchSize.height));
-
-    std::vector<Feature>::size_type featuresSize;
-    stream.read((char *)&featuresSize, sizeof(featuresSize));
-    features.reserve(featuresSize);
-    unsigned int featuresValue;
-    Feature value;
-    for (std::vector<Feature>::size_type i = 0; i < featuresSize; ++i) {
-        stream.read((char *)&featuresValue, sizeof(featuresValue));
-        value.x1 = (uchar)(featuresValue % patchSize.width);
-        value.y1 = (uchar)(featuresValue / patchSize.width);
-        stream.read((char *)&featuresValue, sizeof(featuresValue));
-        value.x2 = (uchar)(featuresValue % patchSize.width);
-        value.y2 = (uchar)(featuresValue / patchSize.width);
-        features.push_back(value);
-    }
-
-    // don't read classCounters
-    /*
-    std::vector<int>::size_type classCountersSize;
-    stream.read((char *)&classCountersSize, sizeof(classCountersSize));
-    classCounters.reserve(classCountersSize);
-    int classCountersValue;
-    for (std::vector<int>::size_type i = 0; i < classCountersSize; ++i) {
-        stream.read((char *)&classCountersValue, sizeof(classCountersValue));
-        classCounters.push_back(classCountersValue);
-    }
-    */
-
-    std::vector<float>::size_type posteriorsSize;
-    stream.read((char *)&posteriorsSize, sizeof(posteriorsSize));
-    posteriors.reserve(posteriorsSize);
-    float posteriorsValue;
-    for (std::vector<float>::size_type i = 0; i < posteriorsSize; ++i) {
-        stream.read((char *)&posteriorsValue, sizeof(posteriorsValue));
-        posteriors.push_back(posteriorsValue);
-    }
+  clear();
+
+  stream.read((char*)&verbose, sizeof(verbose));
+  stream.read((char*)&nstructs, sizeof(nstructs));
+  stream.read((char*)&structSize, sizeof(structSize));
+  stream.read((char*)&nclasses, sizeof(nclasses));
+  stream.read((char*)&signatureSize, sizeof(signatureSize));
+  stream.read((char*)&compressionMethod, sizeof(compressionMethod));
+  stream.read((char*)&leavesPerStruct, sizeof(leavesPerStruct));
+  stream.read((char*)&patchSize.width, sizeof(patchSize.width));
+  stream.read((char*)&patchSize.height, sizeof(patchSize.height));
+
+  std::vector<Feature>::size_type featuresSize;
+  stream.read((char*)&featuresSize, sizeof(featuresSize));
+  features.reserve(featuresSize);
+  unsigned int featuresValue;
+  Feature value;
+  for (std::vector<Feature>::size_type i = 0; i < featuresSize; ++i)
+  {
+    stream.read((char*)&featuresValue, sizeof(featuresValue));
+    value.x1 = (uchar)(featuresValue % patchSize.width);
+    value.y1 = (uchar)(featuresValue / patchSize.width);
+    stream.read((char*)&featuresValue, sizeof(featuresValue));
+    value.x2 = (uchar)(featuresValue % patchSize.width);
+    value.y2 = (uchar)(featuresValue / patchSize.width);
+    features.push_back(value);
+  }
+
+  // don't read classCounters
+  /*
+  std::vector<int>::size_type classCountersSize;
+  stream.read((char *)&classCountersSize, sizeof(classCountersSize));
+  classCounters.reserve(classCountersSize);
+  int classCountersValue;
+  for (std::vector<int>::size_type i = 0; i < classCountersSize; ++i) {
+    stream.read((char *)&classCountersValue, sizeof(classCountersValue));
+    classCounters.push_back(classCountersValue);
+  }
+  */
+
+  std::vector<float>::size_type posteriorsSize;
+  stream.read((char*)&posteriorsSize, sizeof(posteriorsSize));
+  posteriors.reserve(posteriorsSize);
+  float posteriorsValue;
+  for (std::vector<float>::size_type i = 0; i < posteriorsSize; ++i)
+  {
+    stream.read((char*)&posteriorsValue, sizeof(posteriorsValue));
+    posteriors.push_back(posteriorsValue);
+  }
 }
 
-void FernClassifierWrapper::writeBinary(std::fstream &stream) const
+void FernClassifierWrapper::writeBinary(std::fstream& stream) const
 {
-    stream.write((char *)&verbose, sizeof(verbose));
-    stream.write((char *)&nstructs, sizeof(nstructs));
-    stream.write((char *)&structSize, sizeof(structSize));
-    stream.write((char *)&nclasses, sizeof(nclasses));
-    stream.write((char *)&signatureSize, sizeof(signatureSize));
-    stream.write((char *)&compressionMethod, sizeof(compressionMethod));
-    stream.write((char *)&leavesPerStruct, sizeof(leavesPerStruct));
-    stream.write((char *)&patchSize.width, sizeof(patchSize.width));
-    stream.write((char *)&patchSize.height, sizeof(patchSize.height));
-
-    std::vector<Feature>::size_type featuresSize = features.size();
-    stream.write((char *)&featuresSize, sizeof(featuresSize));
-    unsigned int featuresValue;
-    for (std::vector<Feature>::const_iterator itr = features.begin(); itr != features.end(); ++itr) {
-        featuresValue = itr->y1 * patchSize.width + itr->x1;
-        stream.write((char *)&featuresValue, sizeof(featuresValue));
-        featuresValue = itr->y2 * patchSize.width + itr->x2;
-        stream.write((char *)&featuresValue, sizeof(featuresValue));
-    }
-
-    // don't write classCounters
-    /*
-    std::vector<int>::size_type classCountersSize = classCounters.size();
-    stream.write((char *)&classCountersSize, sizeof(classCountersSize));
-    for (std::vector<int>::const_iterator itr = classCounters.begin(); itr != classCounters.end(); ++itr) {
-        stream.write((char *)&*itr, sizeof(*itr));
-    }
-    */
-
-    std::vector<float>::size_type posteriorsSize = posteriors.size();
-    stream.write((char *)&posteriorsSize, sizeof(posteriorsSize));
-    for (std::vector<float>::const_iterator itr = posteriors.begin(); itr != posteriors.end(); ++itr) {
-        stream.write((char *)&*itr, sizeof(*itr));
-    }
+  stream.write((char*)&verbose, sizeof(verbose));
+  stream.write((char*)&nstructs, sizeof(nstructs));
+  stream.write((char*)&structSize, sizeof(structSize));
+  stream.write((char*)&nclasses, sizeof(nclasses));
+  stream.write((char*)&signatureSize, sizeof(signatureSize));
+  stream.write((char*)&compressionMethod, sizeof(compressionMethod));
+  stream.write((char*)&leavesPerStruct, sizeof(leavesPerStruct));
+  stream.write((char*)&patchSize.width, sizeof(patchSize.width));
+  stream.write((char*)&patchSize.height, sizeof(patchSize.height));
+
+  std::vector<Feature>::size_type featuresSize = features.size();
+  stream.write((char*)&featuresSize, sizeof(featuresSize));
+  unsigned int featuresValue;
+  for (std::vector<Feature>::const_iterator itr = features.begin();
+       itr != features.end(); ++itr)
+  {
+    featuresValue = itr->y1 * patchSize.width + itr->x1;
+    stream.write((char*)&featuresValue, sizeof(featuresValue));
+    featuresValue = itr->y2 * patchSize.width + itr->x2;
+    stream.write((char*)&featuresValue, sizeof(featuresValue));
+  }
+
+  // don't write classCounters
+  /*
+  std::vector<int>::size_type classCountersSize = classCounters.size();
+  stream.write((char *)&classCountersSize, sizeof(classCountersSize));
+  for (std::vector<int>::const_iterator itr = classCounters.begin(); itr !=
+  classCounters.end(); ++itr) { stream.write((char *)&*itr, sizeof(*itr));
+  }
+  */
+
+  std::vector<float>::size_type posteriorsSize = posteriors.size();
+  stream.write((char*)&posteriorsSize, sizeof(posteriorsSize));
+  for (std::vector<float>::const_iterator itr = posteriors.begin();
+       itr != posteriors.end(); ++itr)
+  {
+    stream.write((char*)&*itr, sizeof(*itr));
+  }
 }
 
 FernImageDetector::FernImageDetector(const bool visualize)
-    : mPatchGenerator(0, 256, 13, true, /*0.25*/0.10, 1.0/*0.6, 1.5*/, -CV_PI*1.0, CV_PI*1.0, -CV_PI*0.0, CV_PI*0.0/*-2*CV_PI, 2*CV_PI*/) // TODO: check angle values, cant be -2pi..2pi ?
-    , mLDetector(3, 20, PYR_LEVELS, N_VIEWS, PATCH_SIZE, 2)
-    , mClassifier()
-    , mKeyPoints()
-    , mImagePoints()
-    , mModelPoints()
-    , mVisualize(visualize)
-    , mObjects()
-    , mSize()
-    , mCorrespondences()
-    , mHomography()
-    , mInlierRatio(0)
+  : mPatchGenerator(0, 256, 13, true, /*0.25*/ 0.10, 1.0 /*0.6, 1.5*/,
+                    -CV_PI * 1.0, CV_PI * 1.0, -CV_PI * 0.0,
+                    CV_PI * 0.0 /*-2*CV_PI, 2*CV_PI*/)  // TODO: check angle
+                                                        // values, can't be
+                                                        // -2pi..2pi ?
+  , mLDetector(3, 20, PYR_LEVELS, N_VIEWS, PATCH_SIZE, 2)
+  , mClassifier()
+  , mKeyPoints()
+  , mImagePoints()
+  , mModelPoints()
+  , mVisualize(visualize)
+  , mObjects()
+  , mSize()
+  , mCorrespondences()
+  , mHomography()
+  , mInlierRatio(0)
 {
-    //mHomography.eye(3, 3, CV_64F);
-    mClassifier.resize(1);
+  // mHomography.eye(3, 3, CV_64F);
+  mClassifier.resize(1);
 }
 
 FernImageDetector::~FernImageDetector()
 {
 }
 
-void FernImageDetector::imagePoints(vector<CvPoint2D64f> &points)
+void FernImageDetector::imagePoints(vector<CvPoint2D64f>& points)
 {
-    points.clear();
-    for(size_t i = 0; i < mImagePoints.size(); ++i) {
-        points.push_back(cvPoint2D64f(mImagePoints[i].x, mImagePoints[i].y));
-    }
+  points.clear();
+  for (size_t i = 0; i < mImagePoints.size(); ++i)
+  {
+    points.push_back(cvPoint2D64f(mImagePoints[i].x, mImagePoints[i].y));
+  }
 }
 
-void FernImageDetector::modelPoints(vector<CvPoint3D64f> &points, bool normalize)
+void FernImageDetector::modelPoints(vector<CvPoint3D64f>& points,
+                                    bool normalize)
 {
-    points.clear();
-    //int minx = 1e10, miny = 1e10;
-    //int maxx = 0, maxy = 0;
-    for(size_t i = 0; i < mModelPoints.size(); ++i) {
-        CvPoint3D64f pt = cvPoint3D64f(mModelPoints[i].x, mModelPoints[i].y, 0.0);
-        if(normalize) {
-            //minx = (pt.x<minx)?pt.x:minx;
-            //miny = (pt.y<miny)?pt.y:miny;
-            //maxx = (pt.x>maxx)?pt.x:maxx;
-            //maxy = (pt.y>maxy)?pt.y:maxy;
-            pt.x -= mSize.width/2;
-            pt.y -= mSize.height/2;
-            pt.x /= mSize.width*0.10;
-            pt.y /= mSize.width*0.10;
-        }
-        points.push_back(pt);
-    }
+  points.clear();
+  // int minx = 1e10, miny = 1e10;
+  // int maxx = 0, maxy = 0;
+  for (size_t i = 0; i < mModelPoints.size(); ++i)
+  {
+    CvPoint3D64f pt = cvPoint3D64f(mModelPoints[i].x, mModelPoints[i].y, 0.0);
+    if (normalize)
+    {
+      // minx = (pt.x<minx)?pt.x:minx;
+      // miny = (pt.y<miny)?pt.y:miny;
+      // maxx = (pt.x>maxx)?pt.x:maxx;
+      // maxy = (pt.y>maxy)?pt.y:maxy;
+      pt.x -= mSize.width / 2;
+      pt.y -= mSize.height / 2;
+      pt.x /= mSize.width * 0.10;
+      pt.y /= mSize.width * 0.10;
+    }
+    points.push_back(pt);
+  }
 }
 
 cv::Size FernImageDetector::size()
 {
-    return mSize;
+  return mSize;
 }
 
 cv::Mat FernImageDetector::homography()
 {
-    return mHomography;
+  return mHomography;
 }
 
 double FernImageDetector::inlierRatio()
 {
-    return mInlierRatio;
+  return mInlierRatio;
 }
 
-void FernImageDetector::train(const std::string &filename)
+void FernImageDetector::train(const std::string& filename)
 {
-    Mat object = imread(filename.c_str(), CV_LOAD_IMAGE_GRAYSCALE);
-    train(object);
+  Mat object = imread(filename.c_str(), CV_LOAD_IMAGE_GRAYSCALE);
+  train(object);
 }
 
-void FernImageDetector::train(Mat &object)
+void FernImageDetector::train(Mat& object)
 {
-    mObjects.push_back(object.clone());
-
-    Mat blurredObject;
-    GaussianBlur(mObjects[0], blurredObject, Size(SIZE_BLUR, SIZE_BLUR), 0, 0);
-
-    if(mVisualize) {
-        cvNamedWindow("Object", 1);
-        imshow("Object", blurredObject);
-        cv::waitKey(2000);
-    }
-
-    //buildPyramid(object, objpyr, mLDetector.nOctaves-1);
-    //mLDetector(mObjects[0], mKeyPoints, N_PTS_TO_TEACH); // TODO: find robust features, TODO: in pyramids?
-    mLDetector.getMostStable2D(blurredObject, mKeyPoints, N_PTS_TO_TEACH, mPatchGenerator);
-
-    if(mVisualize) {
-        for(int i = 0; i < (int)mKeyPoints.size(); ++i)
-            circle(blurredObject, mKeyPoints[i].pt, int(mKeyPoints[i].size/10), CV_RGB(64,64,64));
-
-        imshow("Object", blurredObject);
-        cv::waitKey(2000);
-    }
-
-    mClassifier[0].trainFromSingleView(blurredObject,
-                                       mKeyPoints,
-                                       PATCH_SIZE,
-                                       SIGNATURE_SIZE,
-                                       N_STRUCTS, // TODO: why was (int)mKeyPoints.size(), use here? why not a constant?
-                                       STRUCT_SIZE,
-                                       N_VIEWS,
-                                       FernClassifier::COMPRESSION_NONE,
-                                       mPatchGenerator);
-
-    mSize = cv::Size(object.cols, object.rows);
+  mObjects.push_back(object.clone());
+
+  Mat blurredObject;
+  GaussianBlur(mObjects[0], blurredObject, Size(SIZE_BLUR, SIZE_BLUR), 0, 0);
+
+  if (mVisualize)
+  {
+    cv::namedWindow("Object", 1);
+    imshow("Object", blurredObject);
+    cv::waitKey(2000);
+  }
+
+  // buildPyramid(object, objpyr, mLDetector.nOctaves-1);
+  // mLDetector(mObjects[0], mKeyPoints, N_PTS_TO_TEACH); // TODO: find robust
+  // features, TODO: in pyramids?
+  mLDetector.getMostStable2D(blurredObject, mKeyPoints, N_PTS_TO_TEACH,
+                             mPatchGenerator);
+
+  if (mVisualize)
+  {
+    for (int i = 0; i < (int)mKeyPoints.size(); ++i)
+      circle(blurredObject, mKeyPoints[i].pt, int(mKeyPoints[i].size / 10),
+             CV_RGB(64, 64, 64));
+
+    imshow("Object", blurredObject);
+    cv::waitKey(2000);
+  }
+
+  mClassifier[0].trainFromSingleView(
+      blurredObject, mKeyPoints, PATCH_SIZE, SIGNATURE_SIZE,
+      N_STRUCTS,  // TODO: why was (int)mKeyPoints.size(), use here? why not a
+                  // constant?
+      STRUCT_SIZE, N_VIEWS, FernClassifier::COMPRESSION_NONE, mPatchGenerator);
+
+  mSize = cv::Size(object.cols, object.rows);
 }
 
-void FernImageDetector::findFeatures(Mat &object, bool planeAssumption)
+void FernImageDetector::findFeatures(Mat& object, bool planeAssumption)
 {
-    //cv::flip(object, object, 1);
-
-    vector<KeyPoint> keypoints;
-    vector<Mat> objpyr;
-
-    GaussianBlur(object, object, Size(SIZE_BLUR, SIZE_BLUR), 0, 0);
-    //buildPyramid(object, objpyr, mLDetector.nOctaves-1);
-    mLDetector.nOctaves = 1;
-    mLDetector(object/*objpyr*/, keypoints, N_PTS_TO_FIND);
-
-    int m = mKeyPoints.size();
-    int n = keypoints.size();
-    vector<int> bestMatches(m, -1);
-    vector<float> maxLogProb(m, -FLT_MAX);
-    vector<float> signature;
-    vector<int> pairs;
-
-    for(size_t i = 0; i < keypoints.size(); ++i) {
-        Point2f pt = keypoints[i].pt;
-        //int oct = keypoints[i].octave; std::cout<<"oct "<<oct<<std::endl;
-        int k = mClassifier[0](object, pt, signature);
-        if(k >= 0 && (bestMatches[k] < 0 || signature[k] > maxLogProb[k])) {
-            maxLogProb[k] = signature[k];
-            bestMatches[k] = i;
-        }
-    }
-
-    for(int i = 0; i < m; i++ )
-        if(bestMatches[i] >= 0) {
-            pairs.push_back(i);
-            pairs.push_back(bestMatches[i]);
-        }
-
-    mCorrespondences = Mat(mObjects[0].rows + object.rows, std::max(mObjects[0].cols, object.cols), CV_8UC3);
-    mCorrespondences = Scalar(0.);
-    Mat part(mCorrespondences, Rect(0, 0, mObjects[0].cols, mObjects[0].rows));
-    cvtColor(mObjects[0], part, CV_GRAY2BGR);
-    part = Mat(mCorrespondences, Rect(0, mObjects[0].rows, object.cols, object.rows));
-    cvtColor(object, part, CV_GRAY2BGR);
-
-    for(int i = 0; i < (int)keypoints.size(); ++i)
-        circle(object, keypoints[i].pt, int(keypoints[i].size/5), CV_RGB(64,64,64));
-
-    vector<Point2f> fromPt, toPt;
-    vector<uchar> mask;
-    for(int i = 0; i < m; ++i)
-        if( bestMatches[i] >= 0 ){
-            fromPt.push_back(mKeyPoints[i].pt);
-            toPt.push_back(keypoints[bestMatches[i]].pt);
-        }
-
-    static double valmin = 1.0;
-    static double valmax = 0.0;
-    mModelPoints.clear();
-    mImagePoints.clear();
-    int n_inliers = 0;
-
-    if(planeAssumption && fromPt.size() > 8) {
-        cv::Mat H = cv::findHomography(Mat(fromPt), Mat(toPt), mask, RANSAC/*CV_LMEDS*/, 20);
-        mHomography = H;
-        //CompareModelAndObservation();
-
-        for(size_t i = 0, j = 0; i < (int)pairs.size(); i += 2, ++j) {
-            if(mask[j]) {
-                cv::Point2f pi(keypoints[pairs[i+1]].pt);
-                cv::Point2f pw(mKeyPoints[pairs[i]].pt);
-                mModelPoints.push_back(pw);
-                mImagePoints.push_back(pi);
-                line(mCorrespondences, mKeyPoints[pairs[i]].pt,
-                     keypoints[pairs[i+1]].pt + Point2f(0.0,(float)mObjects[0].rows),
-                     Scalar(i*i%244,100-i*100%30,i*i-50*i));
-                n_inliers++;
-            }
-        }
-    } else {
-        for(size_t i = 0, j = 0; i < (int)pairs.size(); i += 2, ++j) {
-            cv::Point2f pi(keypoints[pairs[i+1]].pt);
-            cv::Point2f pw(mKeyPoints[pairs[i]].pt);
-            mModelPoints.push_back(pw);
-            mImagePoints.push_back(pi);
-            line(mCorrespondences, mKeyPoints[pairs[i]].pt,
-                 keypoints[pairs[i+1]].pt + Point2f(0.0,(float)mObjects[0].rows),
-                 Scalar(i*i%244,100-i*100%30,i*i-50*i));
-        }
-    }
-
-
-    double val = 0.0;
-    if(fromPt.size()>0) val = 1.*n_inliers/fromPt.size();
-    if(val > valmax) valmax = val;
-    if(val < valmin) valmin = val;
-
-    mInlierRatio = val;
-
-    if (mVisualize) {
-        cvNamedWindow("Matches", 1);
-        imshow("Matches", mCorrespondences);
-        cv::waitKey(1);
-    }
+  // cv::flip(object, object, 1);
+
+  vector<KeyPoint> keypoints;
+  vector<Mat> objpyr;
+
+  GaussianBlur(object, object, Size(SIZE_BLUR, SIZE_BLUR), 0, 0);
+  // buildPyramid(object, objpyr, mLDetector.nOctaves-1);
+  mLDetector.nOctaves = 1;
+  mLDetector(object /*objpyr*/, keypoints, N_PTS_TO_FIND);
+
+  int m = mKeyPoints.size();
+  int n = keypoints.size();
+  vector<int> bestMatches(m, -1);
+  vector<float> maxLogProb(m, -FLT_MAX);
+  vector<float> signature;
+  vector<int> pairs;
+
+  for (size_t i = 0; i < keypoints.size(); ++i)
+  {
+    Point2f pt = keypoints[i].pt;
+    // int oct = keypoints[i].octave; std::cout<<"oct "<<oct<<std::endl;
+    int k = mClassifier[0](object, pt, signature);
+    if (k >= 0 && (bestMatches[k] < 0 || signature[k] > maxLogProb[k]))
+    {
+      maxLogProb[k] = signature[k];
+      bestMatches[k] = i;
+    }
+  }
+
+  for (int i = 0; i < m; i++)
+    if (bestMatches[i] >= 0)
+    {
+      pairs.push_back(i);
+      pairs.push_back(bestMatches[i]);
    }
 
+  mCorrespondences = Mat(mObjects[0].rows + object.rows,
+                         std::max(mObjects[0].cols, object.cols), CV_8UC3);
+  mCorrespondences = Scalar(0.);
+  Mat part(mCorrespondences, Rect(0, 0, mObjects[0].cols, mObjects[0].rows));
+  cvtColor(mObjects[0], part, CV_GRAY2BGR);
+  part = Mat(mCorrespondences,
+             Rect(0, mObjects[0].rows, object.cols, object.rows));
+  cvtColor(object, part, CV_GRAY2BGR);
+
+  for (int i = 0; i < (int)keypoints.size(); ++i)
+    circle(object, keypoints[i].pt, int(keypoints[i].size / 5),
+           CV_RGB(64, 64, 64));
+
+  vector<Point2f> fromPt, toPt;
+  vector<uchar> mask;
+  for (int i = 0; i < m; ++i)
+    if (bestMatches[i] >= 0)
+    {
+      fromPt.push_back(mKeyPoints[i].pt);
+      toPt.push_back(keypoints[bestMatches[i]].pt);
+    }
+
+  static double valmin = 1.0;
+  static double valmax = 0.0;
+  mModelPoints.clear();
+  mImagePoints.clear();
+  int n_inliers = 0;
+
+  if (planeAssumption && fromPt.size() > 8)
+  {
+    cv::Mat H = cv::findHomography(Mat(fromPt), Mat(toPt), mask,
+                                   RANSAC /*CV_LMEDS*/, 20);
+    mHomography = H;
+    // CompareModelAndObservation();
+
+    for (size_t i = 0, j = 0; i < (int)pairs.size(); i += 2, ++j)
+    {
+      if (mask[j])
+      {
+        cv::Point2f pi(keypoints[pairs[i + 1]].pt);
+        cv::Point2f pw(mKeyPoints[pairs[i]].pt);
+        mModelPoints.push_back(pw);
+        mImagePoints.push_back(pi);
+        line(mCorrespondences, mKeyPoints[pairs[i]].pt,
+             keypoints[pairs[i + 1]].pt + Point2f(0.0, (float)mObjects[0].rows),
+             Scalar(i * i % 244, 100 - i * 100 % 30, i * i - 50 * i));
+        n_inliers++;
+      }
+    }
+  }
+  else
+  {
+    for (size_t i = 0, j = 0; i < (int)pairs.size(); i += 2, ++j)
+    {
+      cv::Point2f pi(keypoints[pairs[i + 1]].pt);
+      cv::Point2f pw(mKeyPoints[pairs[i]].pt);
+      mModelPoints.push_back(pw);
+      mImagePoints.push_back(pi);
+      line(mCorrespondences, mKeyPoints[pairs[i]].pt,
+           keypoints[pairs[i + 1]].pt + Point2f(0.0, (float)mObjects[0].rows),
+           Scalar(i * i % 244, 100 - i * 100 % 30, i * i - 50 * i));
    }
+  }
+
+  double val = 0.0;
+  if (fromPt.size() > 0)
+    val = 1. * n_inliers / fromPt.size();
+  if (val > valmax)
+    valmax = val;
+  if (val < valmin)
+    valmin = val;
+
+  mInlierRatio = val;
+
+  if (mVisualize)
+  {
+    cv::namedWindow("Matches", 1);
+    imshow("Matches", mCorrespondences);
+    cv::waitKey(1);
+  }
 }
 
-bool FernImageDetector::read(const std::string &filename, const bool binary)
+bool FernImageDetector::read(const std::string& filename, const bool binary)
 {
-    if (binary) {
-        std::fstream bs(filename.c_str(), std::fstream::in | std::fstream::binary);
-
-        if (!bs.is_open()) {
-            return false;
-        }
-
-        bs.read((char *)&mLDetector.radius, sizeof(mLDetector.radius));
-        bs.read((char *)&mLDetector.threshold, sizeof(mLDetector.threshold));
-        bs.read((char *)&mLDetector.nOctaves, sizeof(mLDetector.nOctaves));
-        bs.read((char *)&mLDetector.nViews, sizeof(mLDetector.nViews));
-        bs.read((char *)&mLDetector.verbose, sizeof(mLDetector.verbose));
-        bs.read((char *)&mLDetector.baseFeatureSize, sizeof(mLDetector.baseFeatureSize));
-        bs.read((char *)&mLDetector.clusteringDistance, sizeof(mLDetector.clusteringDistance));
-
-        mClassifier[0].readBinary(bs);
-
-        std::vector<KeyPoint>::size_type size;
-        bs.read((char *)&size, sizeof(size));
-        mKeyPoints.reserve(size);
-        KeyPoint value;
-        for (std::vector<KeyPoint>::size_type i = 0; i < size; ++i) {
-            bs.read((char *)&value.pt.x, sizeof(value.pt.x));
-            bs.read((char *)&value.pt.y, sizeof(value.pt.y));
-            bs.read((char *)&value.size, sizeof(value.size));
-            bs.read((char *)&value.angle, sizeof(value.angle));
-            bs.read((char *)&value.response, sizeof(value.response));
-            bs.read((char *)&value.octave, sizeof(value.octave));
-            bs.read((char *)&value.class_id, sizeof(value.class_id));
-            mKeyPoints.push_back(value);
-        }
-
-        bs.read((char *)&mSize.width, sizeof(mSize.width));
-        bs.read((char *)&mSize.height, sizeof(mSize.height));
-
-        std::vector<Mat>::size_type objectsSize;
-        bs.read((char *)&objectsSize, sizeof(objectsSize));
-        mObjects.reserve(objectsSize);
-        int rows;
-        int cols;
-        int type;
-        for (std::vector<Mat>::size_type i = 0; i < objectsSize; ++i) {
-            bs.read((char *)&rows, sizeof(rows));
-            bs.read((char *)&cols, sizeof(cols));
-            bs.read((char *)&type, sizeof(type));
-            Mat objectsValue(rows, cols, type);
-            bs.read((char *)objectsValue.data, objectsValue.elemSize() * objectsValue.total());
-            mObjects.push_back(objectsValue);
-        }
-
-        bs.close();
+  if (binary)
+  {
+    std::fstream bs(filename.c_str(), std::fstream::in | std::fstream::binary);
+
+    if (!bs.is_open())
+    {
+      return false;
+    }
+
+    bs.read((char*)&mLDetector.radius, sizeof(mLDetector.radius));
+    bs.read((char*)&mLDetector.threshold, sizeof(mLDetector.threshold));
+    bs.read((char*)&mLDetector.nOctaves, sizeof(mLDetector.nOctaves));
+    bs.read((char*)&mLDetector.nViews, sizeof(mLDetector.nViews));
+    bs.read((char*)&mLDetector.verbose, sizeof(mLDetector.verbose));
+    bs.read((char*)&mLDetector.baseFeatureSize,
+            sizeof(mLDetector.baseFeatureSize));
+    bs.read((char*)&mLDetector.clusteringDistance,
+            sizeof(mLDetector.clusteringDistance));
+
+    mClassifier[0].readBinary(bs);
+
+    std::vector<KeyPoint>::size_type size;
+    bs.read((char*)&size, sizeof(size));
+    mKeyPoints.reserve(size);
+    KeyPoint value;
+    for (std::vector<KeyPoint>::size_type i = 0; i < size; ++i)
+    {
+      bs.read((char*)&value.pt.x, sizeof(value.pt.x));
+      bs.read((char*)&value.pt.y, sizeof(value.pt.y));
+      bs.read((char*)&value.size, sizeof(value.size));
+      bs.read((char*)&value.angle, sizeof(value.angle));
+      bs.read((char*)&value.response, sizeof(value.response));
+      bs.read((char*)&value.octave, sizeof(value.octave));
+      bs.read((char*)&value.class_id, sizeof(value.class_id));
+      mKeyPoints.push_back(value);
    }
-    else {
-        FileStorage fs(filename, FileStorage::READ);
-
-        if (!fs.isOpened()) {
-            return false;
-        }
-
-        FileNode node = fs.getFirstTopLevelNode();
-        std::cout << "loaded file" << std::endl;
-        cv::read(node["model_points"], mKeyPoints);
-        std::cout << "loaded model points" << std::endl;
-        mClassifier[0].read(node["fern_classifier"]);
-        std::cout << "loaded classifier" << std::endl;
+
+    bs.read((char*)&mSize.width, sizeof(mSize.width));
+    bs.read((char*)&mSize.height, sizeof(mSize.height));
+
+    std::vector<Mat>::size_type objectsSize;
+    bs.read((char*)&objectsSize, sizeof(objectsSize));
+    mObjects.reserve(objectsSize);
+    int rows;
+    int cols;
+    int type;
+    for (std::vector<Mat>::size_type i = 0; i < objectsSize; ++i)
+    {
+      bs.read((char*)&rows, sizeof(rows));
+      bs.read((char*)&cols, sizeof(cols));
+      bs.read((char*)&type, sizeof(type));
+      Mat objectsValue(rows, cols, type);
+      bs.read((char*)objectsValue.data,
+              objectsValue.elemSize() * objectsValue.total());
+      mObjects.push_back(objectsValue);
+    }
+
+    bs.close();
+  }
+  else
+  {
+    FileStorage fs(filename, FileStorage::READ);
+
+    if (!fs.isOpened())
+    {
+      return false;
    }
 
-    return true;
+    FileNode node = fs.getFirstTopLevelNode();
+    std::cout << "loaded file" << std::endl;
+    cv::read(node["model_points"], mKeyPoints);
+    std::cout << "loaded model points" << std::endl;
+    mClassifier[0].read(node["fern_classifier"]);
+    std::cout << "loaded classifier" << std::endl;
+  }
+
+  return true;
 }
 
-bool FernImageDetector::write(const std::string &filename, const bool binary)
+bool FernImageDetector::write(const std::string& filename, const bool binary)
 {
-    if (binary) {
-        std::fstream bs(filename.c_str(), std::fstream::out | std::fstream::binary);
-
-        if (!bs.is_open()) {
-            return false;
-        }
-
-        bs.write((char *)&mLDetector.radius, sizeof(mLDetector.radius));
-        bs.write((char *)&mLDetector.threshold, sizeof(mLDetector.threshold));
-        bs.write((char *)&mLDetector.nOctaves, sizeof(mLDetector.nOctaves));
-        bs.write((char *)&mLDetector.nViews, sizeof(mLDetector.nViews));
-        bs.write((char *)&mLDetector.verbose, sizeof(mLDetector.verbose));
-        bs.write((char *)&mLDetector.baseFeatureSize, sizeof(mLDetector.baseFeatureSize));
-        bs.write((char *)&mLDetector.clusteringDistance, sizeof(mLDetector.clusteringDistance));
-
-        mClassifier[0].writeBinary(bs);
-
-        std::vector<KeyPoint>::size_type size = mKeyPoints.size();
-        bs.write((char *)&size, sizeof(size));
-        for (std::vector<KeyPoint>::const_iterator itr = mKeyPoints.begin(); itr != mKeyPoints.end(); ++itr) {
-            bs.write((char *)&itr->pt.x, sizeof(itr->pt.x));
-            bs.write((char *)&itr->pt.y, sizeof(itr->pt.y));
-            bs.write((char *)&itr->size, sizeof(itr->size));
-            bs.write((char *)&itr->angle, sizeof(itr->angle));
-            bs.write((char *)&itr->response, sizeof(itr->response));
-            bs.write((char *)&itr->octave, sizeof(itr->octave));
-            bs.write((char *)&itr->class_id, sizeof(itr->class_id));
-        }
-
-        bs.write((char *)&mSize.width, sizeof(mSize.width));
-        bs.write((char *)&mSize.height, sizeof(mSize.height));
-
-        std::vector<Mat>::size_type objectsSize = mObjects.size();
-        bs.write((char *)&objectsSize, sizeof(objectsSize));
-        for (std::vector<Mat>::const_iterator itr = mObjects.begin(); itr != mObjects.end(); ++itr) {
-            bs.write((char *)&itr->rows, sizeof(itr->rows));
-            bs.write((char *)&itr->cols, sizeof(itr->cols));
-            int type = itr->type();
-            bs.write((char *)&type, sizeof(type));
-            bs.write((char *)itr->data, itr->elemSize() * itr->total());
-        }
-
-        bs.close();
+  if (binary)
+  {
+    std::fstream bs(filename.c_str(), std::fstream::out | std::fstream::binary);
+
+    if (!bs.is_open())
+    {
+      return false;
+    }
+
+    bs.write((char*)&mLDetector.radius, sizeof(mLDetector.radius));
+    bs.write((char*)&mLDetector.threshold, sizeof(mLDetector.threshold));
+    bs.write((char*)&mLDetector.nOctaves, sizeof(mLDetector.nOctaves));
+    bs.write((char*)&mLDetector.nViews, sizeof(mLDetector.nViews));
+    bs.write((char*)&mLDetector.verbose, sizeof(mLDetector.verbose));
+    bs.write((char*)&mLDetector.baseFeatureSize,
+             sizeof(mLDetector.baseFeatureSize));
+    bs.write((char*)&mLDetector.clusteringDistance,
+             sizeof(mLDetector.clusteringDistance));
+
+    mClassifier[0].writeBinary(bs);
+
+    std::vector<KeyPoint>::size_type size = mKeyPoints.size();
+    bs.write((char*)&size, sizeof(size));
+    for (std::vector<KeyPoint>::const_iterator itr = mKeyPoints.begin();
+         itr != mKeyPoints.end(); ++itr)
+    {
+      bs.write((char*)&itr->pt.x, sizeof(itr->pt.x));
+      bs.write((char*)&itr->pt.y, sizeof(itr->pt.y));
+      bs.write((char*)&itr->size, sizeof(itr->size));
+      bs.write((char*)&itr->angle, sizeof(itr->angle));
+      bs.write((char*)&itr->response, sizeof(itr->response));
+      bs.write((char*)&itr->octave, sizeof(itr->octave));
+      bs.write((char*)&itr->class_id, sizeof(itr->class_id));
    }
-    else {
-        FileStorage fs(filename, FileStorage::WRITE);
-
-        if (!fs.isOpened()) {
-            return false;
-        }
-
-        WriteStructContext ws(fs, "fern_image_detector", CV_NODE_MAP);
-        cv::write(fs, "model_points", mKeyPoints);
-        mClassifier[0].write(fs, "fern_classifier");
+
+    bs.write((char*)&mSize.width, sizeof(mSize.width));
+    bs.write((char*)&mSize.height, sizeof(mSize.height));
+
+    std::vector<Mat>::size_type objectsSize = mObjects.size();
+    bs.write((char*)&objectsSize, sizeof(objectsSize));
+    for (std::vector<Mat>::const_iterator itr = mObjects.begin();
+         itr != mObjects.end(); ++itr)
+    {
+      bs.write((char*)&itr->rows, sizeof(itr->rows));
+      bs.write((char*)&itr->cols, sizeof(itr->cols));
+      int type = itr->type();
+      bs.write((char*)&type, sizeof(type));
+      bs.write((char*)itr->data, itr->elemSize() * itr->total());
+    }
+
+    bs.close();
+  }
+  else
+  {
+    FileStorage fs(filename, FileStorage::WRITE);
+
+    if (!fs.isOpened())
+    {
+      return false;
    }
 
-    return true;
+    WriteStructContext ws(fs, "fern_image_detector", CV_NODE_MAP);
+    cv::write(fs, "model_points", mKeyPoints);
+    mClassifier[0].write(fs, "fern_classifier");
+  }
+
+  return true;
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/FernPoseEstimator.cpp b/ar_track_alvar/src/FernPoseEstimator.cpp
index d50497e..3485480 100644
--- a/ar_track_alvar/src/FernPoseEstimator.cpp
+++ b/ar_track_alvar/src/FernPoseEstimator.cpp
@@ -23,12 +23,9 @@
 
 #include "FernPoseEstimator.h"
 
-namespace alvar {
-
-FernPoseEstimator::FernPoseEstimator()
-    : mPose()
-    , mCamera()
-    , mCameraEC()
+namespace alvar
+{
+FernPoseEstimator::FernPoseEstimator() : mPose(), mCamera(), mCameraEC()
 {
 }
 
@@ -38,46 +35,53 @@ FernPoseEstimator::~FernPoseEstimator()
 
 Pose FernPoseEstimator::pose() const
 {
-    return mPose;
+  return mPose;
 }
 
 Camera FernPoseEstimator::camera() const
 {
-    return mCamera;
+  return mCamera;
 }
 
-bool FernPoseEstimator::setCalibration(const std::string &filename, int width, int height)
+bool FernPoseEstimator::setCalibration(const std::string& filename, int width,
+                                       int height)
 {
-    bool r1 = mCamera.SetCalib(filename.c_str(), width, height);
-    bool r2 = mCameraEC.SetCalib(filename.c_str(), width, height);
-    return r1 && r2;
+  bool r1 = mCamera.SetCalib(filename.c_str(), width, height);
+  bool r2 = mCameraEC.SetCalib(filename.c_str(), width, height);
+  return r1 && r2;
 }
 
 void FernPoseEstimator::setResolution(int width, int height)
 {
-    mCamera.SetRes(width, height);
-    mCameraEC.SetRes(width, height);
+  mCamera.SetRes(width, height);
+  mCameraEC.SetRes(width, height);
 }
 
-void FernPoseEstimator::calculateFromPointCorrespondences(FernPoseEstimator::ModelPointVector &mpts, FernPoseEstimator::ImagePointVector &ipts)
+void FernPoseEstimator::calculateFromPointCorrespondences(
+    FernPoseEstimator::ModelPointVector& mpts,
+    FernPoseEstimator::ImagePointVector& ipts)
 {
-    mCamera.CalcExteriorOrientation(mpts, ipts, &mPose); // TODO replace camera->cameraec
+  mCamera.CalcExteriorOrientation(mpts, ipts,
+                                  &mPose);  // TODO replace camera->cameraec
 }
 
-void FernPoseEstimator::updateFromTrackedPoints(FernPoseEstimator::ExternalContainerMap &container)
+void FernPoseEstimator::updateFromTrackedPoints(
+    FernPoseEstimator::ExternalContainerMap& container)
 {
-    mCameraEC.UpdatePose(container, &mPose);
+  mCameraEC.UpdatePose(container, &mPose);
 }
 
-void FernPoseEstimator::extractPlaneCoordinates(FernPoseEstimator::ExternalContainerMap &container)
+void FernPoseEstimator::extractPlaneCoordinates(
+    FernPoseEstimator::ExternalContainerMap& container)
 {
-    ExternalContainerMap::iterator iter = container.begin();
-    ExternalContainerMap::iterator iter_end = container.end();
-    for(; iter != iter_end; ++iter) {
-        alvar::ExternalContainer &f = iter->second;
-        mCameraEC.Get3dOnPlane(&mPose, f.p2d, f.p3d);
-        f.has_p3d = true;
-    }
+  ExternalContainerMap::iterator iter = container.begin();
+  ExternalContainerMap::iterator iter_end = container.end();
+  for (; iter != iter_end; ++iter)
+  {
+    alvar::ExternalContainer& f = iter->second;
+    mCameraEC.Get3dOnPlane(&mPose, f.p2d, f.p3d);
+    f.has_p3d = true;
+  }
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/FileFormatUtils.cpp b/ar_track_alvar/src/FileFormatUtils.cpp
index 4b597a9..54b6499 100644
--- a/ar_track_alvar/src/FileFormatUtils.cpp
+++ b/ar_track_alvar/src/FileFormatUtils.cpp
@@ -26,90 +26,111 @@
 #include
 #include
 
-namespace alvar {
-
-  bool FileFormatUtils::decodeXMLMatrix(const tinyxml2::XMLElement *xml_matrix, int &type, int &rows, int &cols) {
-    const char * xml_type = xml_matrix->Attribute("type");
-    if (strcmp("CV_32F", xml_type) == 0) type = CV_32F;
-    else if (strcmp("CV_64F", xml_type) == 0) type = CV_64F;
-    else return false;
-
-    if (xml_matrix->QueryIntAttribute("rows", &rows) != EXIT_SUCCESS) return false;
-    if (xml_matrix->QueryIntAttribute("cols", &cols) != EXIT_SUCCESS) return false;
-
-    return true;
-  }
-
-  CvMat* FileFormatUtils::allocateXMLMatrix(const tinyxml2::XMLElement *xml_matrix) {
-    if (!xml_matrix) return NULL;
-
-    int type, rows, cols;
-    if (!decodeXMLMatrix(xml_matrix, type, rows, cols)) return NULL;
-
-    return cvCreateMat(rows, cols, type);
-  }
-
-  bool FileFormatUtils::parseXMLMatrix(const tinyxml2::XMLElement *xml_matrix, CvMat *matrix) {
-    if (!xml_matrix || !matrix) return false;
-
-    int type, rows, cols;
-    if (!decodeXMLMatrix(xml_matrix, type, rows, cols)) return false;
-
-    if (type != cvGetElemType(matrix))
return false; - if (rows != matrix->rows) return false; - if (cols != matrix->cols) return false; - - const tinyxml2::XMLElement *xml_data = xml_matrix->FirstChildElement("data"); - for (int r = 0; r < matrix->rows; ++r) { - for (int c = 0; c < matrix->cols; ++c) { - if (!xml_data) return false; - double value = atof(xml_data->GetText()); - cvSetReal2D(matrix, r, c, value); - xml_data = (const tinyxml2::XMLElement *) xml_data->NextSiblingElement("data"); - } - } - - return true; - } - - tinyxml2::XMLElement * FileFormatUtils::createXMLMatrix(const char* element_name, const CvMat *matrix) { - if (!matrix) return NULL; - - tinyxml2::XMLElement * xml_matrix; - xml_matrix->SetName(element_name); - int precision; - if (cvGetElemType(matrix) == CV_32F) { - xml_matrix->SetAttribute("type", "CV_32F"); - precision = std::numeric_limits::digits10 + 2; - } - else if (cvGetElemType(matrix) == CV_64F) { - xml_matrix->SetAttribute("type", "CV_64F"); - precision = std::numeric_limits::digits10 + 2; - } - else { - //delete xml_matrix; - return NULL; - } - - xml_matrix->SetAttribute("rows", matrix->rows); - xml_matrix->SetAttribute("cols", matrix->cols); - - for (int r = 0; r < matrix->rows; ++r) { - for (int c = 0; c < matrix->cols; ++c) { - tinyxml2::XMLElement *xml_data; - xml_data->SetName("data"); - - - - xml_matrix->LinkEndChild(xml_data); - std::stringstream ss; - ss.precision(precision); - ss<SetText(ss.str().c_str()); - } - } - return xml_matrix; - } +namespace alvar +{ +bool FileFormatUtils::decodeXMLMatrix(const TiXmlElement* xml_matrix, int& type, + int& rows, int& cols) +{ + const char* xml_type = xml_matrix->Attribute("type"); + if (strcmp("CV_32F", xml_type) == 0) + type = CV_32F; + else if (strcmp("CV_64F", xml_type) == 0) + type = CV_64F; + else + return false; + + if (xml_matrix->QueryIntAttribute("rows", &rows) != TIXML_SUCCESS) + return false; + if (xml_matrix->QueryIntAttribute("cols", &cols) != TIXML_SUCCESS) + return false; + + return true; } + +cv::Mat* FileFormatUtils::allocateXMLMatrix(const TiXmlElement* xml_matrix) +{ + if (!xml_matrix) + return NULL; + + int type, rows, cols; + if (!decodeXMLMatrix(xml_matrix, type, rows, cols)) + return NULL; + + return new cv::Mat(rows, cols, type); +} + +bool FileFormatUtils::parseXMLMatrix(const TiXmlElement* xml_matrix, + cv::Mat& matrix) +{ + if (!xml_matrix || matrix.empty()) + return false; + + int type, rows, cols; + if (!decodeXMLMatrix(xml_matrix, type, rows, cols)) + return false; + + if (type != matrix.type()) + return false; + if (rows != matrix.rows) + return false; + if (cols != matrix.cols) + return false; + + const TiXmlElement* xml_data = xml_matrix->FirstChildElement("data"); + for (int r = 0; r < matrix.rows; ++r) + { + for (int c = 0; c < matrix.cols; ++c) + { + if (!xml_data) + return false; + double value = atof(xml_data->GetText()); + matrix.at(r, c) = value; + xml_data = (const TiXmlElement*)xml_data->NextSibling("data"); + } + } + + return true; +} + +TiXmlElement* FileFormatUtils::createXMLMatrix(const char* element_name, + const cv::Mat& matrix) +{ + if (matrix.empty()) + return NULL; + + TiXmlElement* xml_matrix = new TiXmlElement(element_name); + int precision; + if (matrix.type() == CV_32F) + { + xml_matrix->SetAttribute("type", "CV_32F"); + precision = std::numeric_limits::digits10 + 2; + } + else if (matrix.type() == CV_64F) + { + xml_matrix->SetAttribute("type", "CV_64F"); + precision = std::numeric_limits::digits10 + 2; + } + else + { + delete xml_matrix; + return NULL; + } + + 
xml_matrix->SetAttribute("rows", matrix.rows);
+  xml_matrix->SetAttribute("cols", matrix.cols);
+
+  for (int r = 0; r < matrix.rows; ++r)
+  {
+    for (int c = 0; c < matrix.cols; ++c)
+    {
+      TiXmlElement* xml_data = new TiXmlElement("data");
+      xml_matrix->LinkEndChild(xml_data);
+      std::stringstream ss;
+      ss.precision(precision);
+      ss << matrix.at<double>(r, c);
+      xml_data->LinkEndChild(new TiXmlText(ss.str().c_str()));
+    }
+  }
+  return xml_matrix;
+}
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Filter.cpp b/ar_track_alvar/src/Filter.cpp
index 7cd80b0..d6875c2 100644
--- a/ar_track_alvar/src/Filter.cpp
+++ b/ar_track_alvar/src/Filter.cpp
@@ -27,87 +27,116 @@ template class ALVAR_EXPORT alvar::FilterArray<alvar::FilterAverage>;
 template class ALVAR_EXPORT alvar::FilterArray<alvar::FilterMedian>;
 template class ALVAR_EXPORT alvar::FilterArray<alvar::FilterRunningAverage>;
-template class ALVAR_EXPORT alvar::FilterArray<alvar::FilterDoubleExponentialSmoothing>;
+template class ALVAR_EXPORT
+    alvar::FilterArray<alvar::FilterDoubleExponentialSmoothing>;

 using namespace std;

-namespace alvar {
+namespace alvar
+{
 using namespace std;

-Filter::Filter() {
-    value=0;
+Filter::Filter()
+{
+  value = 0;
 }

-void FilterAverage::push_to_buffer(double y) {
-    buffer.push_back(y);
-    while (buffer.size() > window_size) {
-        buffer.pop_front();
-    }
+void FilterAverage::push_to_buffer(double y)
+{
+  buffer.push_back(y);
+  while (buffer.size() > window_size)
+  {
+    buffer.pop_front();
+  }
 }

-double FilterAverage::next(double y) {
-    if (window_size <= 0) {
-        count++;
-        double alpha = 1.0/count;
-        return (value=((1.0-alpha)*value)+(alpha*y));
-    } else {
-        push_to_buffer(y);
-        double sum = 0;
-        for (deque<double>::iterator iter = buffer.begin(); iter != buffer.end(); iter++) {
-            sum += (double)*iter;
-        }
-        return (value=sum/buffer.size());
-    }
+double FilterAverage::next(double y)
+{
+  if (window_size <= 0)
+  {
+    count++;
+    double alpha = 1.0 / count;
+    return (value = ((1.0 - alpha) * value) + (alpha * y));
+  }
+  else
+  {
+    push_to_buffer(y);
+    double sum = 0;
+    for (deque<double>::iterator iter = buffer.begin(); iter != buffer.end();
+         iter++)
+    {
+      sum += (double)*iter;
+    }
+    return (value = sum / buffer.size());
+  }
 }

-void FilterAverage::reset() {
-    buffer.clear();
+void FilterAverage::reset()
+{
+  buffer.clear();
 }

-double FilterAverage::deviation() const {
-    double sum = 0;
-    if (buffer.size() == 0) return 0;
-    for (deque<double>::const_iterator iter = buffer.begin(); iter != buffer.end(); iter++) {
-        double val = ((double)*iter)-value;
-        sum += (val*val);
-    }
-    sum /= buffer.size();
-    return sqrt(sum);
+double FilterAverage::deviation() const
+{
+  double sum = 0;
+  if (buffer.size() == 0)
+    return 0;
+  for (deque<double>::const_iterator iter = buffer.begin();
+       iter != buffer.end(); iter++)
+  {
+    double val = ((double)*iter) - value;
+    sum += (val * val);
+  }
+  sum /= buffer.size();
+  return sqrt(sum);
 }

-double FilterMedian::next(double y) {
-    if (window_size <= 1) return y;
-    push_to_buffer(y);
-    copy(buffer.begin(), buffer.end(), sort_buffer.begin());
-    int nth = buffer.size()/2;
-    nth_element(sort_buffer.begin(), sort_buffer.begin() + nth, sort_buffer.begin()+buffer.size());
-    return value = sort_buffer[nth];
+double FilterMedian::next(double y)
+{
+  if (window_size <= 1)
+    return y;
+  push_to_buffer(y);
+  copy(buffer.begin(), buffer.end(), sort_buffer.begin());
+  int nth = buffer.size() / 2;
+  nth_element(sort_buffer.begin(), sort_buffer.begin() + nth,
+              sort_buffer.begin() + buffer.size());
+  return value = sort_buffer[nth];
 }

-double FilterRunningAverage::next(double y) {
-    if (breset) {
-        breset=false;
-        value=(double)y;
-    } else {
-        value = ((1.0-alpha) * value) + (alpha * (double)y);
- } - return value; +double FilterRunningAverage::next(double y) +{ + if (breset) + { + breset = false; + value = (double)y; + } + else + { + value = ((1.0 - alpha) * value) + (alpha * (double)y); + } + return value; } -void FilterRunningAverage::reset() { breset=true; } +void FilterRunningAverage::reset() +{ + breset = true; +} -double FilterDoubleExponentialSmoothing::next(double y) { - if (breset) { - breset=false; - value=(double)y; - slope=0.0; - } - else { - double value_prev = value; - value = ((1.0-alpha) * (value + slope)) + (alpha * (double)y); - slope = ((1.0-gamma) * (slope)) + (gamma * (value - value_prev)); - } - return value; +double FilterDoubleExponentialSmoothing::next(double y) +{ + if (breset) + { + breset = false; + value = (double)y; + slope = 0.0; + } + else + { + double value_prev = value; + value = ((1.0 - alpha) * (value + slope)) + (alpha * (double)y); + slope = ((1.0 - gamma) * (slope)) + (gamma * (value - value_prev)); + } + return value; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/GlutViewer.cpp b/ar_track_alvar/src/GlutViewer.cpp index e361bcc..f255cdb 100644 --- a/ar_track_alvar/src/GlutViewer.cpp +++ b/ar_track_alvar/src/GlutViewer.cpp @@ -7,136 +7,142 @@ using namespace std; using namespace alvar; using namespace GlutViewer; -Drawable::Drawable(double _scale, double _r, double _g, double _b) { - SetScale(_scale); - SetColor(_r, _g, _b); +Drawable::Drawable(double _scale, double _r, double _g, double _b) +{ + SetScale(_scale); + SetColor(_r, _g, _b); } -void Drawable::SetScale(double _scale) { - scale=_scale; +void Drawable::SetScale(double _scale) +{ + scale = _scale; } -void Drawable::SetColor(double _r, double _g, double _b) { - color[0] = _r; - color[1] = _g; - color[2] = _b; +void Drawable::SetColor(double _r, double _g, double _b) +{ + color[0] = _r; + color[1] = _g; + color[2] = _b; } -void Drawable::Draw() { - //double color[3] = {1, 1, 1}; - glPushMatrix(); - glMultMatrixd(gl_mat); - DrawAxis(scale, color); - glPopMatrix(); +void Drawable::Draw() +{ + // double color[3] = {1, 1, 1}; + glPushMatrix(); + glMultMatrixd(gl_mat); + DrawAxis(scale, color); + glPopMatrix(); } void Drawable::DrawAxis(double scale, double color[3]) { - glEnable(GL_DEPTH_TEST); - glDisable(GL_CULL_FACE); - - glColor3d(color[0], color[1], color[2]); - glBegin(GL_QUADS); - glVertex3f(-scale/2, -scale/2, 0.0); - glVertex3f(-scale/2, scale/2, 0.0); - - glVertex3f(-scale/2, scale/2, 0.0); - glVertex3f( scale/2, scale/2, 0.0); - - glVertex3f( scale/2, scale/2, 0.0); - glVertex3f( scale/2, -scale/2, 0.0); - - glVertex3f( scale/2, -scale/2, 0.0); - glVertex3f(-scale/2, -scale/2, 0.0); - glEnd(); - - // Z - glColor3d(0.0, 0.0, 1.0); - glBegin(GL_LINES); - glVertex3f(0.0, 0.0, 0.0); - glVertex3f(0.0, 0.0, scale); - glEnd(); - - glDisable(GL_DEPTH_TEST); - - // X - glColor3d(1.0, 0.0, 0.0); - glBegin(GL_LINES); - glVertex3f(0.0, 0.0, 0.0); - glVertex3f(scale, 0.0, 0.0); - glEnd(); - - // Y - glColor3d(0.0, 1.0, 0.0); - glBegin(GL_LINES); - glVertex3f(0.0, 0.0, 0.0); - glVertex3f(0.0, scale, 0.0); - glEnd(); + glEnable(GL_DEPTH_TEST); + glDisable(GL_CULL_FACE); + + glColor3d(color[0], color[1], color[2]); + glBegin(GL_QUADS); + glVertex3f(-scale / 2, -scale / 2, 0.0); + glVertex3f(-scale / 2, scale / 2, 0.0); + + glVertex3f(-scale / 2, scale / 2, 0.0); + glVertex3f(scale / 2, scale / 2, 0.0); + + glVertex3f(scale / 2, scale / 2, 0.0); + glVertex3f(scale / 2, -scale / 2, 0.0); + + glVertex3f(scale / 2, -scale / 2, 0.0); + glVertex3f(-scale / 2, -scale / 
2, 0.0); + glEnd(); + + // Z + glColor3d(0.0, 0.0, 1.0); + glBegin(GL_LINES); + glVertex3f(0.0, 0.0, 0.0); + glVertex3f(0.0, 0.0, scale); + glEnd(); + + glDisable(GL_DEPTH_TEST); + + // X + glColor3d(1.0, 0.0, 0.0); + glBegin(GL_LINES); + glVertex3f(0.0, 0.0, 0.0); + glVertex3f(scale, 0.0, 0.0); + glEnd(); + + // Y + glColor3d(0.0, 1.0, 0.0); + glBegin(GL_LINES); + glVertex3f(0.0, 0.0, 0.0); + glVertex3f(0.0, scale, 0.0); + glEnd(); } -void Drawable::SetGLMatTraQuat(double *tra, double *quat, bool flip) +void Drawable::SetGLMatTraQuat(double* tra, double* quat, bool flip) { - Rotation r; - if (quat != 0) - { - CvMat cv_mat; - cvInitMatHeader(&cv_mat, 4, 1, CV_64F, quat); - r.SetQuaternion(&cv_mat); - } - - int flp = 1; - if (flip) - { - r.Transpose(); - //flp=-1; - } - - CvMat cv_gl_mat; - cvInitMatHeader(&cv_gl_mat, 4, 4, CV_64F, gl_mat); cvZero(&cv_gl_mat); - r.GetMatrix(&cv_gl_mat); - cvSet2D(&cv_gl_mat, 0, 3, cvScalar(flp*tra[0])); - cvSet2D(&cv_gl_mat, 1, 3, cvScalar(flp*tra[1])); - cvSet2D(&cv_gl_mat, 2, 3, cvScalar(flp*tra[2])); - cvSet2D(&cv_gl_mat, 3, 3, cvScalar(1)); - - cvTranspose(&cv_gl_mat, &cv_gl_mat); + Rotation r; + if (quat != 0) + { + CvMat cv_mat; + cvInitMatHeader(&cv_mat, 4, 1, CV_64F, quat); + r.SetQuaternion(&cv_mat); + } + + int flp = 1; + if (flip) + { + r.Transpose(); + // flp=-1; + } + + CvMat cv_gl_mat; + cvInitMatHeader(&cv_gl_mat, 4, 4, CV_64F, gl_mat); + cvZero(&cv_gl_mat); + r.GetMatrix(&cv_gl_mat); + cvSet2D(&cv_gl_mat, 0, 3, cvScalar(flp * tra[0])); + cvSet2D(&cv_gl_mat, 1, 3, cvScalar(flp * tra[1])); + cvSet2D(&cv_gl_mat, 2, 3, cvScalar(flp * tra[2])); + cvSet2D(&cv_gl_mat, 3, 3, cvScalar(1)); + + cvTranspose(&cv_gl_mat, &cv_gl_mat); } -void Drawable::SetGLMatTraRod(double *tra, double *rod) +void Drawable::SetGLMatTraRod(double* tra, double* rod) { - // This is the OpenGL augmentation matrix - CvMat cv_gl_mat; - cvInitMatHeader(&cv_gl_mat, 4, 4, CV_64F, gl_mat); cvSetIdentity(&cv_gl_mat); - - // Figure out the rotation part - double rot_mat_data[3][3]; - CvMat rot_mat = cvMat(3, 3, CV_64F, rot_mat_data); - cvSetIdentity(&rot_mat); - if (rod != 0) - { - CvMat rod_mat; - cvInitMatHeader(&rod_mat, 3, 1, CV_64F, rod); - cvRodrigues2(&rod_mat, &rot_mat, 0); - } - - // Fill in the rotation part - cvmSet(&cv_gl_mat, 0, 0, cvmGet(&rot_mat, 0, 0)); - cvmSet(&cv_gl_mat, 0, 1, cvmGet(&rot_mat, 0, 1)); - cvmSet(&cv_gl_mat, 0, 2, cvmGet(&rot_mat, 0, 2)); - cvmSet(&cv_gl_mat, 1, 0, cvmGet(&rot_mat, 1, 0)); - cvmSet(&cv_gl_mat, 1, 1, cvmGet(&rot_mat, 1, 1)); - cvmSet(&cv_gl_mat, 1, 2, cvmGet(&rot_mat, 1, 2)); - cvmSet(&cv_gl_mat, 2, 0, cvmGet(&rot_mat, 2, 0)); - cvmSet(&cv_gl_mat, 2, 1, cvmGet(&rot_mat, 2, 1)); - cvmSet(&cv_gl_mat, 2, 2, cvmGet(&rot_mat, 2, 2)); - - // Fill in the translation part - cvSet2D(&cv_gl_mat, 0, 3, cvScalar(tra[0])); - cvSet2D(&cv_gl_mat, 1, 3, cvScalar(tra[1])); - cvSet2D(&cv_gl_mat, 2, 3, cvScalar(tra[2])); - - // Transpose into OpenGL presentation order - cvTranspose(&cv_gl_mat, &cv_gl_mat); + // This is the OpenGL augmentation matrix + CvMat cv_gl_mat; + cvInitMatHeader(&cv_gl_mat, 4, 4, CV_64F, gl_mat); + cvSetIdentity(&cv_gl_mat); + + // Figure out the rotation part + double rot_mat_data[3][3]; + CvMat rot_mat = cvMat(3, 3, CV_64F, rot_mat_data); + cvSetIdentity(&rot_mat); + if (rod != 0) + { + CvMat rod_mat; + cvInitMatHeader(&rod_mat, 3, 1, CV_64F, rod); + cvRodrigues2(&rod_mat, &rot_mat, 0); + } + + // Fill in the rotation part + cvmSet(&cv_gl_mat, 0, 0, cvmGet(&rot_mat, 0, 0)); + cvmSet(&cv_gl_mat, 0, 1, cvmGet(&rot_mat, 0, 1)); + 
cvmSet(&cv_gl_mat, 0, 2, cvmGet(&rot_mat, 0, 2)); + cvmSet(&cv_gl_mat, 1, 0, cvmGet(&rot_mat, 1, 0)); + cvmSet(&cv_gl_mat, 1, 1, cvmGet(&rot_mat, 1, 1)); + cvmSet(&cv_gl_mat, 1, 2, cvmGet(&rot_mat, 1, 2)); + cvmSet(&cv_gl_mat, 2, 0, cvmGet(&rot_mat, 2, 0)); + cvmSet(&cv_gl_mat, 2, 1, cvmGet(&rot_mat, 2, 1)); + cvmSet(&cv_gl_mat, 2, 2, cvmGet(&rot_mat, 2, 2)); + + // Fill in the translation part + cvSet2D(&cv_gl_mat, 0, 3, cvScalar(tra[0])); + cvSet2D(&cv_gl_mat, 1, 3, cvScalar(tra[1])); + cvSet2D(&cv_gl_mat, 2, 3, cvScalar(tra[2])); + + // Transpose into OpenGL presentation order + cvTranspose(&cv_gl_mat, &cv_gl_mat); } Mutex mutex_items; @@ -145,353 +151,360 @@ vector items; int cur_button; float elev = 0.0, azim = 0.0, rad = 0.0; float panx = 0.0, pany = 0.0; -float jaw = 0.0, jawx, jawy, jawz; +float jaw = 0.0, jawx, jawy, jawz; int ar_window; int vr_window; -float off_x=0, off_y=0; +float off_x = 0, off_y = 0; -unsigned char* image=0; +unsigned char* image = 0; int a_argc; -char **a_argv; +char** a_argv; int width; int height; Threads threads; double proj_mat[16]; -double modelview_mat[16] = { 1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1 }; +double modelview_mat[16] = { 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 }; -static void *glut_thread(void *lpThreadParameter) +static void* glut_thread(void* lpThreadParameter) { - //InitializeCriticalSection(&critical_section_items); + // InitializeCriticalSection(&critical_section_items); - glutInit(&a_argc, a_argv); - glutInitDisplayMode(GLUT_DEPTH | GLUT_RGB | GLUT_DOUBLE); - glutInitWindowSize(width, height); + glutInit(&a_argc, a_argv); + glutInitDisplayMode(GLUT_DEPTH | GLUT_RGB | GLUT_DOUBLE); + glutInitWindowSize(width, height); - ar_window = glutCreateWindow("AR"); - glutDisplayFunc(DrawAr); - glutSpecialFunc(KeyCallback); - glutPositionWindow(0, 0); + ar_window = glutCreateWindow("AR"); + glutDisplayFunc(DrawAr); + glutSpecialFunc(KeyCallback); + glutPositionWindow(0, 0); - vr_window = glutCreateWindow("VR"); - glutDisplayFunc(DrawVr); - glutPositionWindow(0, height); + vr_window = glutCreateWindow("VR"); + glutDisplayFunc(DrawVr); + glutPositionWindow(0, height); - glEnable(GL_CULL_FACE); - glEnable(GL_DEPTH_TEST); - glEnable(GL_COLOR_MATERIAL); + glEnable(GL_CULL_FACE); + glEnable(GL_DEPTH_TEST); + glEnable(GL_COLOR_MATERIAL); - glutMouseFunc(Mouse); - glutMotionFunc(Motion); + glutMouseFunc(Mouse); + glutMotionFunc(Motion); - atexit(Exit); + atexit(Exit); - glutMainLoop(); - return 0; + glutMainLoop(); + return 0; } void GlutViewer::KeyCallback(int key, int x, int y) { - switch(key) - { - case GLUT_KEY_LEFT: - off_x-=1; - break; - case GLUT_KEY_RIGHT: - off_x+=1; - break; - case GLUT_KEY_UP: - off_y-=1; - break; - case GLUT_KEY_DOWN: - off_y+=1; - break; - - } + switch (key) + { + case GLUT_KEY_LEFT: + off_x -= 1; + break; + case GLUT_KEY_RIGHT: + off_x += 1; + break; + case GLUT_KEY_UP: + off_y -= 1; + break; + case GLUT_KEY_DOWN: + off_y += 1; + break; + } } double GlutViewer::GetXOffset() { - return off_x; + return off_x; } double GlutViewer::GetYOffset() { - return off_y; + return off_y; } void GlutViewer::Start(int argc, char** argv, int w, int h, float r) { - a_argc = argc; - a_argv = argv; - width = w; - height = h; - rad = r; + a_argc = argc; + a_argv = argv; + width = w; + height = h; + rad = r; - threads.create(glut_thread, 0); + threads.create(glut_thread, 0); } void GlutViewer::DrawFloor() { - glColor3f(0.5, 0.5, 1.0); - - glBegin(GL_LINES); - for(int i = -20; i <= 20; i+=1) - { - glVertex3f((float)i, 0.0f, -20); - 
glVertex3f((float)i, 0.0f, 20); - glVertex3f(-20, 0.0f, (float)i); - glVertex3f( 20, 0.0f, (float)i); - } - glEnd(); + glColor3f(0.5, 0.5, 1.0); + + glBegin(GL_LINES); + for (int i = -20; i <= 20; i += 1) + { + glVertex3f((float)i, 0.0f, -20); + glVertex3f((float)i, 0.0f, 20); + glVertex3f(-20, 0.0f, (float)i); + glVertex3f(20, 0.0f, (float)i); + } + glEnd(); } void GlutViewer::Mouse(int button, int state, int x, int y) { - cur_button = button; + cur_button = button; } void GlutViewer::DrawAxis(float scale) { - glColor3f(1.0, 0.0, 0.0); - glBegin(GL_LINES); - glVertex3f(0.0, 0.0, 0.0); - glVertex3f(scale, 0.0, 0.0); - glEnd(); - glColor3f(0.0, 1.0, 0.0); - glBegin(GL_LINES); - glVertex3f(0.0, 0.0, 0.0); - glVertex3f(0.0, scale, 0.0); - glEnd(); - glColor3f(0.0, 0.0, 1.0); - glBegin(GL_LINES); - glVertex3f(0.0, 0.0, 0.0); - glVertex3f(0.0, 0.0, scale); - glEnd(); + glColor3f(1.0, 0.0, 0.0); + glBegin(GL_LINES); + glVertex3f(0.0, 0.0, 0.0); + glVertex3f(scale, 0.0, 0.0); + glEnd(); + glColor3f(0.0, 1.0, 0.0); + glBegin(GL_LINES); + glVertex3f(0.0, 0.0, 0.0); + glVertex3f(0.0, scale, 0.0); + glEnd(); + glColor3f(0.0, 0.0, 1.0); + glBegin(GL_LINES); + glVertex3f(0.0, 0.0, 0.0); + glVertex3f(0.0, 0.0, scale); + glEnd(); } void GlutViewer::Motion(int x, int y) { - static int oldx, oldy; - - int dx = oldx-x; - int dy = oldy-y; - - switch(cur_button) - { - case GLUT_LEFT_BUTTON : - if (abs(dx)>abs(dy)) - { - if (dx>0) - azim += 3.0; - else if (dx<0) - azim -= 3.0; - } - else if (dy>0) - elev -= 3.0; - else if (dy<0) - elev += 3.0; - break; - - case GLUT_MIDDLE_BUTTON : - if (abs(dx)>abs(dy)) - { - if (dx>0) - panx += 10.5; - else if (dx<0) - panx -= 10.5; - } - else if (dy>0) - pany -= 10.5; - else if (dy<0) - pany += 10.5; - break;//?? - - case GLUT_RIGHT_BUTTON : - if (dy > 0) - rad += (10.2); - else if (dy < 0) - rad -= (10.2); - break; - - default: - break; - } - - oldx = x; - oldy = y; + static int oldx, oldy; + + int dx = oldx - x; + int dy = oldy - y; + + switch (cur_button) + { + case GLUT_LEFT_BUTTON: + if (abs(dx) > abs(dy)) + { + if (dx > 0) + azim += 3.0; + else if (dx < 0) + azim -= 3.0; + } + else if (dy > 0) + elev -= 3.0; + else if (dy < 0) + elev += 3.0; + break; + + case GLUT_MIDDLE_BUTTON: + if (abs(dx) > abs(dy)) + { + if (dx > 0) + panx += 10.5; + else if (dx < 0) + panx -= 10.5; + } + else if (dy > 0) + pany -= 10.5; + else if (dy < 0) + pany += 10.5; + break; //?? 
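+    // [Editor's note -- explanatory comment, not from the upstream ALVAR
+    // sources.] Dragging with the left button orbits the VR camera by
+    // nudging azim/elev (in degrees), the middle button pans via panx/pany,
+    // and the right button below zooms by changing the orbit radius rad;
+    // DrawVr() applies these as glTranslatef(panx, pany, -rad) followed by
+    // glRotatef(-elev, 1, 0, 0) and glRotatef(azim, 0, 1, 0).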
+ + case GLUT_RIGHT_BUTTON: + if (dy > 0) + rad += (10.2); + else if (dy < 0) + rad -= (10.2); + break; + + default: + break; + } + + oldx = x; + oldy = y; } - void GlutViewer::DrawContent() { - DrawAxis(100.f); - Lock lock(&mutex_items); - for (unsigned i = 0; i < items.size(); ++i) { - items[i]->Draw(); - } + DrawAxis(100.f); + Lock lock(&mutex_items); + for (unsigned i = 0; i < items.size(); ++i) + { + items[i]->Draw(); + } } - void GlutViewer::DrawVr() { - glutSetWindow(vr_window); - glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); - glClearColor(0.5, 0.2, 0.2, 1.0); - - //glViewport(0, 0, w, h); - glMatrixMode(GL_PROJECTION); - glLoadIdentity(); - //gluPerspective(70, 1, 0.001, 5000); - - glMatrixMode(GL_MODELVIEW); - glLoadIdentity(); - - glTranslatef(panx, pany, -rad); - glRotatef(-elev, 1.0, 0.0, 0.0); - glRotatef( azim, 0.0, 1.0, 0.0); - - float pos[4] = {50, 0, 50}; - glLightfv(GL_LIGHT0, GL_POSITION, pos); - - DrawContent(); - - //glFlush(); - glutSwapBuffers(); - glutPostRedisplay(); + glutSetWindow(vr_window); + glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); + glClearColor(0.5, 0.2, 0.2, 1.0); + + // glViewport(0, 0, w, h); + glMatrixMode(GL_PROJECTION); + glLoadIdentity(); + // gluPerspective(70, 1, 0.001, 5000); + + glMatrixMode(GL_MODELVIEW); + glLoadIdentity(); + + glTranslatef(panx, pany, -rad); + glRotatef(-elev, 1.0, 0.0, 0.0); + glRotatef(azim, 0.0, 1.0, 0.0); + + float pos[4] = { 50, 0, 50 }; + glLightfv(GL_LIGHT0, GL_POSITION, pos); + + DrawContent(); + + // glFlush(); + glutSwapBuffers(); + glutPostRedisplay(); } void GlutViewer::DrawAr() { - glutSetWindow(ar_window); - glClearColor(0.2, 0.5, 0.2, 1.0); - glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); + glutSetWindow(ar_window); + glClearColor(0.2, 0.5, 0.2, 1.0); + glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); - DrawVideo(); + DrawVideo(); - glMatrixMode(GL_PROJECTION); - glLoadMatrixd(proj_mat); + glMatrixMode(GL_PROJECTION); + glLoadMatrixd(proj_mat); - glMatrixMode(GL_MODELVIEW); - glLoadMatrixd(modelview_mat); + glMatrixMode(GL_MODELVIEW); + glLoadMatrixd(modelview_mat); - DrawContent(); + DrawContent(); - glutSwapBuffers(); - glutPostRedisplay(); + glutSwapBuffers(); + glutPostRedisplay(); } -void GlutViewer::SetGlProjectionMatrix(double p[16]) { - memcpy(proj_mat, p, sizeof(double)*16); +void GlutViewer::SetGlProjectionMatrix(double p[16]) +{ + memcpy(proj_mat, p, sizeof(double) * 16); } -void GlutViewer::SetGlModelviewMatrix(double p[16]) { - memcpy(modelview_mat, p, sizeof(double)*16); +void GlutViewer::SetGlModelviewMatrix(double p[16]) +{ + memcpy(modelview_mat, p, sizeof(double) * 16); } void GlutViewer::Reshape(int w, int h) { - h = (h == 0 ? 1 : h); - //glViewport(0, 0, w, h); + h = (h == 0 ? 
1 : h); + // glViewport(0, 0, w, h); - //glMatrixMode(GL_PROJECTION); + // glMatrixMode(GL_PROJECTION); - //glLoadIdentity(); - //glMultMatrixd(projmat); + // glLoadIdentity(); + // glMultMatrixd(projmat); - //glMatrixMode(GL_MODELVIEW); + // glMatrixMode(GL_MODELVIEW); - //glutPostRedisplay(); + // glutPostRedisplay(); } void GlutViewer::Exit() { - //DeleteCriticalSection(&critical_section_items); + // DeleteCriticalSection(&critical_section_items); } -void GlutViewer::DrawableClear() { - Lock lock(&mutex_items); - items.clear(); +void GlutViewer::DrawableClear() +{ + Lock lock(&mutex_items); + items.clear(); } void GlutViewer::DrawableAdd(Drawable* item) { - Lock lock(&mutex_items); - items.push_back(item); + Lock lock(&mutex_items); + items.push_back(item); } // TODO ensure that threading doesn't cause any problems... void GlutViewer::SetVideo(const IplImage* _image) { - image = (unsigned char*)_image->imageData; + image = (unsigned char*)_image->imageData; } void GlutViewer::DrawVideo() { - if(!image) return; - - glDepthMask(GL_FALSE); - glDisable(GL_DEPTH_TEST); - - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - - glColor3f(1, 1, 1); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 512, 512, 0, GL_BGR_EXT, GL_UNSIGNED_BYTE, image); - - glEnable(GL_TEXTURE_2D); - - glMatrixMode(GL_PROJECTION); - glPushMatrix(); - glLoadIdentity(); - glOrtho(0, 1, 0, 1, 0, 1); - - glMatrixMode(GL_MODELVIEW); - glLoadIdentity(); - - glBegin( GL_QUADS ); - glTexCoord2d(0.0,1.0); glVertex2d(0.0,0.0); - glTexCoord2d(1.0,1.0); glVertex2d(1.0,0.0); - glTexCoord2d(1.0,0.0); glVertex2d(1.0,1.0); - glTexCoord2d(0.0,0.0); glVertex2d(0.0,1.0); - glEnd(); - - glDisable(GL_TEXTURE_2D); - glDepthMask(GL_TRUE); - glEnable(GL_DEPTH_TEST); - - glMatrixMode(GL_PROJECTION); - glPopMatrix(); + if (!image) + return; + + glDepthMask(GL_FALSE); + glDisable(GL_DEPTH_TEST); + + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + + glColor3f(1, 1, 1); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 512, 512, 0, GL_BGR_EXT, + GL_UNSIGNED_BYTE, image); + + glEnable(GL_TEXTURE_2D); + + glMatrixMode(GL_PROJECTION); + glPushMatrix(); + glLoadIdentity(); + glOrtho(0, 1, 0, 1, 0, 1); + + glMatrixMode(GL_MODELVIEW); + glLoadIdentity(); + + glBegin(GL_QUADS); + glTexCoord2d(0.0, 1.0); + glVertex2d(0.0, 0.0); + glTexCoord2d(1.0, 1.0); + glVertex2d(1.0, 0.0); + glTexCoord2d(1.0, 0.0); + glVertex2d(1.0, 1.0); + glTexCoord2d(0.0, 0.0); + glVertex2d(0.0, 1.0); + glEnd(); + + glDisable(GL_TEXTURE_2D); + glDepthMask(GL_TRUE); + glEnable(GL_DEPTH_TEST); + + glMatrixMode(GL_PROJECTION); + glPopMatrix(); } /* void GlutViewer::Init(int argc, char** argv, int w, int h) { - glutInit(&argc, argv); - glutInitDisplayMode(GLUT_DEPTH | GLUT_RGB | GLUT_DOUBLE); - glutInitWindowSize(w, h); - - ar_window = glutCreateWindow("AR"); - glutDisplayFunc(DrawAr); - //glutReshapeFunc(Reshape); - - vr_window = glutCreateWindow("VR"); - glutDisplayFunc(DrawVr); - //glutReshapeFunc(Reshape); - - glEnable(GL_CULL_FACE); - glEnable(GL_DEPTH_TEST); - glEnable(GL_COLOR_MATERIAL); - - glutMouseFunc(Mouse); - glutMotionFunc(Motion); - - atexit(Exit); + 
glutInit(&argc, argv); + glutInitDisplayMode(GLUT_DEPTH | GLUT_RGB | GLUT_DOUBLE); + glutInitWindowSize(w, h); + + ar_window = glutCreateWindow("AR"); + glutDisplayFunc(DrawAr); + //glutReshapeFunc(Reshape); + + vr_window = glutCreateWindow("VR"); + glutDisplayFunc(DrawVr); + //glutReshapeFunc(Reshape); + + glEnable(GL_CULL_FACE); + glEnable(GL_DEPTH_TEST); + glEnable(GL_COLOR_MATERIAL); + + glutMouseFunc(Mouse); + glutMotionFunc(Motion); + + atexit(Exit); } */ diff --git a/ar_track_alvar/src/IntegralImage.cpp b/ar_track_alvar/src/IntegralImage.cpp index 40576f5..cae773b 100644 --- a/ar_track_alvar/src/IntegralImage.cpp +++ b/ar_track_alvar/src/IntegralImage.cpp @@ -23,172 +23,207 @@ #include "IntegralImage.h" -namespace alvar { - -void IntIndex::update_next_step() { - next_step = step; - estep += step_remainder; - if (estep >= steps) { - estep -= steps; - next_step++; - } +namespace alvar +{ +void IntIndex::update_next_step() +{ + next_step = step; + estep += step_remainder; + if (estep >= steps) + { + estep -= steps; + next_step++; + } } -IntIndex::IntIndex(int _res, int _steps) { - res = _res; - steps = _steps; - operator=(0); -} -int IntIndex::operator=(int v) { - index = 0; - step = res / steps; - step_remainder = res % steps; - estep = 0; - update_next_step(); - while ((index+next_step-1) < v) next(); - return index; -} -int IntIndex::next() { - index += next_step; - update_next_step(); - return index; -} -int IntIndex::get() const { - return index; -} -int IntIndex::get_next_step() const { - return next_step; -} -int IntIndex::end() const { - return res; +IntIndex::IntIndex(int _res, int _steps) +{ + res = _res; + steps = _steps; + operator=(0); +} +int IntIndex::operator=(int v) +{ + index = 0; + step = res / steps; + step_remainder = res % steps; + estep = 0; + update_next_step(); + while ((index + next_step - 1) < v) + next(); + return index; +} +int IntIndex::next() +{ + index += next_step; + update_next_step(); + return index; +} +int IntIndex::get() const +{ + return index; +} +int IntIndex::get_next_step() const +{ + return next_step; +} +int IntIndex::end() const +{ + return res; } -IntegralImage::IntegralImage() { - sum = 0; -} -IntegralImage::~IntegralImage() { - if (sum) cvReleaseImage(&sum); -} -void IntegralImage::Update(IplImage *gray) { - if ((sum == 0) || - (sum->height != gray->width+1) || - (sum->width != gray->height+1)) - { - if (sum) cvReleaseImage(&sum); - // TODO: Now we assume 'double' - is it ok? - sum = cvCreateImage(cvSize(gray->width+1, gray->height+1), IPL_DEPTH_64F, 1); - } - cvIntegral(gray, sum); -} -double IntegralImage::GetSum(CvRect &rect, int *count /*=0*/) { - int x1 = rect.x; - int x2 = rect.x + rect.width; // Note, not -1 - int y1 = rect.y; - int y2 = rect.y + rect.height; - //cout<height != gray->width + 1) || + (sum->width != gray->height + 1)) + { + if (sum) + cvReleaseImage(&sum); + // TODO: Now we assume 'double' - is it ok? 
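+    // [Editor's note -- explanatory comment, not from the upstream ALVAR
+    // sources.] 'sum' is a standard integral image: entry (y, x) holds the
+    // sum of all gray pixels above and to the left of (x, y), hence the
+    // extra row and column. GetSum() below can then evaluate any
+    // axis-aligned rectangle with four lookups in O(1):
+    //   S = sum(y2, x2) - sum(y2, x1) - sum(y1, x2) + sum(y1, x1)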
+ sum = cvCreateImage(cvSize(gray->width + 1, gray->height + 1), + IPL_DEPTH_64F, 1); + } + cvIntegral(gray, sum); +} +double IntegralImage::GetSum(CvRect& rect, int* count /*=0*/) +{ + int x1 = rect.x; + int x2 = rect.x + rect.width; // Note, not -1 + int y1 = rect.y; + int y2 = rect.y + rect.height; + // cout<imageData)[y2*sum->width+x2] - -((double *)sum->imageData)[y2*sum->width+x1] - -((double *)sum->imageData)[y1*sum->width+x2] - +((double *)sum->imageData)[y1*sum->width+x1]; + double v = +((double*)sum->imageData)[y2 * sum->width + x2] - + ((double*)sum->imageData)[y2 * sum->width + x1] - + ((double*)sum->imageData)[y1 * sum->width + x2] + + ((double*)sum->imageData)[y1 * sum->width + x1]; - if (count) *count = rect.width*rect.height; - return v; -} -double IntegralImage::GetAve(CvRect &rect) { - int count=1; - return GetSum(rect, &count)/count; -} -void IntegralImage::GetSubimage(const CvRect &rect, IplImage *sub) { - int yi=0; - for (IntIndex yy(rect.height, sub->height); yy.get() != yy.end(); yy.next(),yi++) { - int xi=0; - for (IntIndex xx(rect.width, sub->width); xx.get() != xx.end(); xx.next(),xi++) { - //cout<<"res: "<height<<","<width<<" - "; - //cout<height); yy.get() != yy.end(); + yy.next(), yi++) + { + int xi = 0; + for (IntIndex xx(rect.width, sub->width); xx.get() != xx.end(); + xx.next(), xi++) + { + // cout<<"res: "<height<<","<width<<" - "; + // cout<imageData[yi*sub->widthStep+xi] = (char)ave; - } - } -} -void IntegralGradient::CalculatePointNormals(IplImage *gray) { - int width = gray->width-1; - int height = gray->height-1; - if ((normalx == 0) || - (normalx->width != width) || - (normalx->height != height)) - { - if (normalx) cvReleaseImage(&normalx); - if (normaly) cvReleaseImage(&normaly); - normalx = cvCreateImage(cvSize(width, height), IPL_DEPTH_64F, 1); - normaly = cvCreateImage(cvSize(width, height), IPL_DEPTH_64F, 1); - } - for (int j=0; jimageData[yi * sub->widthStep + xi] = (char)ave; + } + } +} +void IntegralGradient::CalculatePointNormals(IplImage* gray) +{ + int width = gray->width - 1; + int height = gray->height - 1; + if ((normalx == 0) || (normalx->width != width) || + (normalx->height != height)) + { + if (normalx) + cvReleaseImage(&normalx); + if (normaly) + cvReleaseImage(&normaly); + normalx = cvCreateImage(cvSize(width, height), IPL_DEPTH_64F, 1); + normaly = cvCreateImage(cvSize(width, height), IPL_DEPTH_64F, 1); + } + for (int j = 0; j < height; j++) + { + for (int i = 0; i < width; i++) + { + /* + // As we assume top-left coordinates we have these reverse compared + to Donahue1992 double a4 = cvGet2D(gray, j, i+1).val[0]; double a3 = + cvGet2D(gray, j, i).val[0]; double a2 = cvGet2D(gray, j+1, i).val[0]; double a1 = cvGet2D(gray, j+1, i+1).val[0]; // Normal vectors; - double nx = (-a1+a2+a3-a4)/4; + double nx = (-a1+a2+a3-a4)/4; double ny = (-a1-a2+a3+a4)/4; cvSet2D(normalx, j, i, cvScalar(nx)); cvSet2D(normaly, j, i, cvScalar(ny)); - */ - // As we assume top-left coordinates we have these reverse compared to Donahue1992 - // TODO: Now we assume 8-bit gray - double a4 = (unsigned char)gray->imageData[(j)*gray->widthStep+(i+1)]; - double a3 = (unsigned char)gray->imageData[(j)*gray->widthStep+(i)]; - double a2 = (unsigned char)gray->imageData[(j+1)*gray->widthStep+(i)]; - double a1 = (unsigned char)gray->imageData[(j+1)*gray->widthStep+(i+1)]; - // Normal vectors; - double nx = (-a1+a2+a3-a4)/4; - double ny = (-a1-a2+a3+a4)/4; - ((double *)normalx->imageData)[j*normalx->width+i] = nx; - ((double *)normaly->imageData)[j*normaly->width+i] = 
ny; - } - } -} -IntegralGradient::IntegralGradient() { - normalx = 0; - normaly = 0; -} -IntegralGradient::~IntegralGradient() { - if (normalx) cvReleaseImage(&normalx); - if (normaly) cvReleaseImage(&normaly); -} -void IntegralGradient::Update(IplImage *gray) { - CalculatePointNormals(gray); - integx.Update(normalx); - integy.Update(normaly); -} -void IntegralGradient::GetGradient(CvRect &rect, double *dirx, double *diry, int *count /*=0*/) { - CvRect r = {rect.x, rect.y, rect.width-1, rect.height-1}; - if (count) *dirx = integx.GetSum(r, count); - else *dirx = integx.GetSum(r); - *diry = integy.GetSum(r); -} -void IntegralGradient::GetAveGradient(CvRect &rect, double *dirx, double *diry) { - int count=1; - GetGradient(rect, dirx, diry, &count); - *dirx /= count; - *diry /= count; + */ + // As we assume top-left coordinates we have these reverse compared to + // Donahue1992 + // TODO: Now we assume 8-bit gray + double a4 = (unsigned char)gray->imageData[(j)*gray->widthStep + (i + 1)]; + double a3 = (unsigned char)gray->imageData[(j)*gray->widthStep + (i)]; + double a2 = + (unsigned char)gray->imageData[(j + 1) * gray->widthStep + (i)]; + double a1 = + (unsigned char)gray->imageData[(j + 1) * gray->widthStep + (i + 1)]; + // Normal vectors; + double nx = (-a1 + a2 + a3 - a4) / 4; + double ny = (-a1 - a2 + a3 + a4) / 4; + ((double*)normalx->imageData)[j * normalx->width + i] = nx; + ((double*)normaly->imageData)[j * normaly->width + i] = ny; + } + } +} +IntegralGradient::IntegralGradient() +{ + normalx = 0; + normaly = 0; +} +IntegralGradient::~IntegralGradient() +{ + if (normalx) + cvReleaseImage(&normalx); + if (normaly) + cvReleaseImage(&normaly); +} +void IntegralGradient::Update(IplImage* gray) +{ + CalculatePointNormals(gray); + integx.Update(normalx); + integy.Update(normaly); +} +void IntegralGradient::GetGradient(CvRect& rect, double* dirx, double* diry, + int* count /*=0*/) +{ + CvRect r = { rect.x, rect.y, rect.width - 1, rect.height - 1 }; + if (count) + *dirx = integx.GetSum(r, count); + else + *dirx = integx.GetSum(r); + *diry = integy.GetSum(r); +} +void IntegralGradient::GetAveGradient(CvRect& rect, double* dirx, double* diry) +{ + int count = 1; + GetGradient(rect, dirx, diry, &count); + *dirx /= count; + *diry /= count; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Kalman.cpp b/ar_track_alvar/src/Kalman.cpp index 381180c..c8e7c84 100644 --- a/ar_track_alvar/src/Kalman.cpp +++ b/ar_track_alvar/src/Kalman.cpp @@ -22,455 +22,510 @@ */ #include -#include // for std::max -#include +#include // for std::max +#include +#include +#include #include "ar_track_alvar/Kalman.h" #include "ar_track_alvar/Util.h" #include "ar_track_alvar/Alvar.h" -namespace alvar { - -KalmanSensorCore::KalmanSensorCore(const KalmanSensorCore &k) { - m = k.m; - n = k.n; - z = cvCloneMat(k.z); - H = cvCloneMat(k.H); - H_trans = cvCloneMat(k.H_trans); - K = cvCloneMat(k.K); - z_pred = cvCloneMat(k.z_pred); - z_residual = cvCloneMat(k.z_residual); - x_gain = cvCloneMat(k.x_gain); -} - -KalmanSensorCore::KalmanSensorCore(int _n, int _m) { - n = _n; - m = _m; - z = cvCreateMat(m,1,CV_64FC1); cvSetZero(z); - H = cvCreateMat(m,n,CV_64FC1); cvSetZero(H); - H_trans = cvCreateMat(n,m,CV_64FC1); cvSetZero(H_trans); - K = cvCreateMat(n,m,CV_64FC1); cvSetZero(K); - z_pred = cvCreateMat(m,1,CV_64FC1); cvSetZero(z_pred); - z_residual = cvCreateMat(m,1,CV_64FC1); cvSetZero(z_residual); - x_gain = cvCreateMat(n,1,CV_64FC1); cvSetZero(x_gain); -} - -KalmanSensorCore::~KalmanSensorCore() { - 
cvReleaseMat(&z); - cvReleaseMat(&H); - cvReleaseMat(&H_trans); - cvReleaseMat(&K); - cvReleaseMat(&z_pred); - cvReleaseMat(&z_residual); - cvReleaseMat(&x_gain); -} - -void KalmanSensorCore::update_x(CvMat *x_pred, CvMat *x) { - // x = x_pred + K * (z - H*x_pred) - cvMatMul(H, x_pred, z_pred); - cvScaleAdd(z_pred, cvScalar(-1), z, z_residual); - cvMatMul(K, z_residual, x_gain); - cvScaleAdd(x_pred, cvScalar(1), x_gain, x); -} - -void KalmanCore::predict_x(unsigned long tick) { - // x_pred = F * x; - cvMatMul(F, x, x_pred); -} - -KalmanCore::KalmanCore(const KalmanCore &s) { - n = s.n; - x = cvCloneMat(s.x); - F = cvCloneMat(s.F); - x_pred = cvCloneMat(s.x_pred); - F_trans = cvCloneMat(s.F_trans); -} - -KalmanCore::KalmanCore(int _n) { - n = _n; - x = cvCreateMat(n,1,CV_64FC1); cvSetZero(x); - F = cvCreateMat(n,n,CV_64FC1); cvSetIdentity(F); - F_trans = cvCreateMat(n,n,CV_64FC1); cvSetIdentity(F_trans); - x_pred = cvCreateMat(n,1,CV_64FC1); cvSetZero(x_pred); -} - -KalmanCore::~KalmanCore() { - cvReleaseMat(&x); - cvReleaseMat(&F); - cvReleaseMat(&F_trans); - cvReleaseMat(&x_pred); -} - -CvMat *KalmanCore::predict() { - predict_x(0); - return x_pred; -} - -CvMat *KalmanCore::predict_update(KalmanSensorCore *sensor) { - predict(); - sensor->update_x(x_pred, x); - return x; -} - -KalmanSensor::KalmanSensor(const KalmanSensor &k) : KalmanSensorCore(k) { - R = cvCloneMat(k.R); - R_tmp = cvCloneMat(k.R_tmp); - P_tmp = cvCloneMat(k.P_tmp); -} - -KalmanSensor::KalmanSensor(int n, int _m) : KalmanSensorCore(n, _m) { - R = cvCreateMat(m,m,CV_64FC1); cvSetZero(R); - R_tmp = cvCreateMat(m,m,CV_64FC1); cvSetZero(R); - P_tmp = cvCreateMat(n,n,CV_64FC1); cvSetZero(P_tmp); -} - -KalmanSensor::~KalmanSensor() { - cvReleaseMat(&R); - cvReleaseMat(&R_tmp); - cvReleaseMat(&P_tmp); -} - -void KalmanSensor::update_K(CvMat *P_pred) { - // K = P * trans(H) * inv(H*P*trans(H) + R) - cvTranspose(H, H_trans); - cvMatMul(P_pred, H_trans, K); - cvMatMul(H, K, R_tmp); - cvScaleAdd(R_tmp, cvScalar(1), R, R_tmp); - cvInvert(R_tmp, R_tmp); - cvMatMul(H_trans, R_tmp, K); - cvMatMul(P_pred, K, K); -} - -void KalmanSensor::update_P(CvMat *P_pred, CvMat *P) { - //P = (I - K*H) * P_pred - cvMatMul(K, H, P_tmp); - cvSetIdentity(P); - cvScaleAdd(P_tmp, cvScalar(-1), P, P); - cvMatMul(P, P_pred, P); -} - -void Kalman::predict_P() { - // P_pred = F*P*trans(F) + Q - cvTranspose(F, F_trans); - cvMatMul(P, F_trans, P_pred); - cvMatMul(F, P_pred, P_pred); - cvScaleAdd(P_pred, cvScalar(1), Q, P_pred); -} - -Kalman::Kalman(int _n) : KalmanCore(_n) { - prev_tick = 0; - Q = cvCreateMat(n,n,CV_64FC1); cvSetZero(Q); - P = cvCreateMat(n,n,CV_64FC1); cvSetZero(P); - P_pred = cvCreateMat(n,n,CV_64FC1); cvSetZero(P_pred); -} - -Kalman::~Kalman() { - cvReleaseMat(&Q); - cvReleaseMat(&P); - cvReleaseMat(&P_pred); -} - -void Kalman::update_F(unsigned long tick) { - //cvSetIdentity(F); -} - -CvMat *Kalman::predict(unsigned long tick) { - update_F(tick); - predict_x(tick); - predict_P(); - return x_pred; -} - -CvMat *Kalman::predict_update(KalmanSensor *sensor, unsigned long tick) { - predict(tick); - sensor->update_H(x_pred); - sensor->update_K(P_pred); - sensor->update_x(x_pred, x); - sensor->update_P(P_pred, P); - prev_tick = tick; - return x; -} - -double Kalman::seconds_since_update(unsigned long tick) { - unsigned long tick_diff = (prev_tick ? 
tick-prev_tick : 0); - return ((double)tick_diff/1000.0); -} - -void KalmanSensorEkf::update_H(CvMat *x_pred) { - // By default we update the H by calculating Jacobian numerically - const double step = 0.000001; - cvZero(H); - for (int i=0; icols, mat->rows); - cv::Mat im = img->clone(); - cv::Mat image_roi = im(roi); - CvMat img2 = cvMat(image_roi); - - //cvSetImageROI(img, cvRect(top, left, mat->cols, mat->rows)); - for (int j=0; jrows; j++) { - for (int i=0; icols; i++) { - double d = cvGet2D(mat, j, i).val[0]; - if (d < 0) d=-d; - double c1=0, c2=0, c3=0; - if (d < 0.1) { - c1 = 0 + ((d - 0.0)/(0.1 - 0.0)*(127 - 0)); - } else if(d < 1.0) { - c1 = 127 + ((d - 0.1)/(1.0 - 0.1)*(255 - 127)); - } else if (d < 10.0) { - c1 = 255; - c2 = 0 + ((d - 1.0)/(10.0 - 1.0)*(255 - 0)); - } else if (d < 100.0) { - c1 = 255; - c2 = 255; - c3 = 0 + ((d - 10.0)/(100.0 - 10.0)*(255 - 0)); - } else { - c1 = 255; c2 = 255; c3 = 255; - } - if (d < 0) { - cvSet2D(&img2, j, i, cvScalar(c3, c2, c1)); // BRG - } else { - cvSet2D(&img2, j, i, cvScalar(c2, c1, c3)); // BGR - } - } - } - //cvResetImageROI(img); -} - -void KalmanVisualize::Init() { - n = kalman->get_n(); - m = sensor->get_m(); - int img_width = std::max(3+n+3+n+5+m+5, 1+n+1+n+1+n+1+m+1+n+1); - int img_height = 1+n+1+std::max(n, m+1+m)+1; - cv::Mat img_r = cv::Mat(cv::Size(img_width, img_height),CV_8UC3); - img_r = cv::Scalar(64,64,64); - img = &img_r; - //cvSet(img, ); - cv::Mat load_im = cv::imread("Legend.png"); - img_legend = &load_im; - CvMat img_legend2 = cvMat(load_im); - - if (img_legend) { - for (img_scale=1; img_scale<50; img_scale++) { - if (img_scale*img_width > img_legend2.width) { - break; - } - } - - - //img_show = cvCreateImage(cvSize(img_width*img_scale, img_legend2.height + img_height*img_scale), IPL_DEPTH_8U, 3); - - cv::Mat img_temp = cv::Mat(cv::Size(img_width*img_scale, img_legend2.height + img_height*img_scale),CV_8UC3); - img_show = &img_temp; - - CvMat img_show2 = cvMat(img_temp); - cvSet(&img_show2, cvScalar(64,64,64)); - - - cv::Rect roi2(0, 0, img_legend2.width, img_legend2.height); - cv::Mat im2 = img_show->clone(); - cv::Mat image_roi2 = im2(roi2); - CvMat img3 = cvMat(image_roi2); - - //cvSetImageROI(&img_show2, cvRect(0, 0, img_legend2.width, img_legend2.height)); - - - cvCopy(&img_legend2, &img3); - //cvResetImageROI(img_show); - cv::namedWindow("KalmanVisualize"); - } else { - img_scale = 1; - cv::Mat img_temp = cv::Mat(cv::Size(img_width*img_scale, img_legend2.height + img_height*img_scale),CV_8UC3); - img_show = &img_temp; - CvMat img_show2 = cvMat(img_temp); - cvSet(&img_show2, cvScalar(64,64,64)); - cv::namedWindow("KalmanVisualize",0); - } -} - -void KalmanVisualize::out_matrix(CvMat *m, char *name) { - if (m->cols == 1) { - std::cout<rows; j++) { - std::cout<<" "<rows == 1) { - std::cout<cols; i++) { - std::cout<<" "<rows; j++) { - for (int i=0; icols; i++) { - std::cout<<" "<x, 1, 1); // 1 - if (kalman_ext && sensor_ext) { - int y = std::max(2+n, 3+m+m); - img_matrix(kalman_ext->P, 1, y); // n - } -} - -void KalmanVisualize::update_post() { - img_matrix(kalman->F, 3, 1); // n - img_matrix(kalman->x_pred, 4+n, 1); // 1 - img_matrix(sensor->H, 6+n, 1); // n - img_matrix(sensor->z_pred, 7+n+n, 1); // 1 - img_matrix(sensor->z, 7+n+n, 2 + m); - img_matrix(sensor->z_residual, 9+n+n, 1); // 1 - img_matrix(sensor->K, 11+n+n, 1); // m - img_matrix(sensor->x_gain, 12+n+n+m, 1); // 1 - img_matrix(kalman->x, 14+n+n+m, 1); // 1 - if (kalman_ext && sensor_ext) { - int y = std::max(2+n, 3+m+m); - img_matrix(kalman_ext->Q, 
2+n, y); // n
-        img_matrix(kalman_ext->P_pred, 3+n+n, y); // n
-        img_matrix(sensor_ext->R, 4+n+n+n, y); // m
-        img_matrix(kalman_ext->P, img->rows - 1 - n, y); // n
-    }
-    if (img_legend) {
-
-        cv::Rect roi(0, img_legend->cols, img->rows * img_scale, img->cols * img_scale);
-        cv::Mat im2 = img_show->clone();
-        cv::Mat image_roi2 = im2(roi);
-        CvMat img3 = cvMat(image_roi2);
-
-        //cvSetImageROI(img_show, cvRect(0, img_legend->height, img->width * img_scale, img->height * img_scale));
-        //cvResize(img, img_show, CV_INTER_NN);
-        //cvResetImageROI(img_show);
-
-    } else {
-        //cvResize(img, img_show, CV_INTER_NN);
-    }
-}
-
-void KalmanVisualize::show() {
-    //out_matrix(sensor->K, "K");
-    cv::imshow("KalmanVisualize", *img_show);
-}
-
-} // namespace alvar
+namespace alvar
+{
+KalmanSensorCore::KalmanSensorCore(const KalmanSensorCore& k)
+{
+  m = k.m;
+  n = k.n;
+  z = k.z.clone();
+  H = k.H.clone();
+  H_trans = k.H_trans.clone();
+  K = k.K.clone();
+  z_pred = k.z_pred.clone();
+  z_residual = k.z_residual.clone();
+  x_gain = k.x_gain.clone();
+}
+
+KalmanSensorCore::KalmanSensorCore(int _n, int _m)
+{
+  n = _n;
+  m = _m;
+  z = cv::Mat::zeros(m, 1, CV_64FC1);
+  H = cv::Mat::zeros(m, n, CV_64FC1);
+  H_trans = cv::Mat::zeros(n, m, CV_64FC1);
+  K = cv::Mat::zeros(n, m, CV_64FC1);
+  z_pred = cv::Mat::zeros(m, 1, CV_64FC1);
+  z_residual = cv::Mat::zeros(m, 1, CV_64FC1);
+  x_gain = cv::Mat::zeros(n, 1, CV_64FC1);
+}
+
+KalmanSensorCore::~KalmanSensorCore()
+{
+  z.release();
+  H.release();
+  H_trans.release();
+  K.release();
+  z_pred.release();
+  z_residual.release();
+  x_gain.release();
+}
+
+void KalmanSensorCore::update_x(const cv::Mat& x_pred, cv::Mat& x)
+{
+  // x = x_pred + K * (z - H*x_pred)
+  z_pred = H * x_pred;
+  z_residual = z_pred * -1 + z;
+  x_gain = K * z_residual;
+  x = x_pred * 1 + x_gain;
+}
+
+void KalmanCore::predict_x(unsigned long tick)
+{
+  // x_pred = F * x;
+  x_pred = F * x;
+}
+
+KalmanCore::KalmanCore(const KalmanCore& s)
+{
+  n = s.n;
+  x = s.x.clone();
+  F = s.F.clone();
+  x_pred = s.x_pred.clone();
+  F_trans = s.F_trans.clone();
+}
+
+KalmanCore::KalmanCore(int _n)
+{
+  n = _n;
+  x = cv::Mat::zeros(n, 1, CV_64FC1);
+  F = cv::Mat::eye(n, n, CV_64FC1);
+  F_trans = cv::Mat::eye(n, n, CV_64FC1);
+  x_pred = cv::Mat::zeros(n, 1, CV_64FC1);
+}
+
+KalmanCore::~KalmanCore()
+{
+  x.release();
+  F.release();
+  F_trans.release();
+  x_pred.release();
+}
+
+cv::Mat& KalmanCore::predict()
+{
+  predict_x(0);
+  return x_pred;
+}
+
+cv::Mat& KalmanCore::predict_update(KalmanSensorCore* sensor)
+{
+  predict();
+  sensor->update_x(x_pred, x);
+  return x;
+}
+
+KalmanSensor::KalmanSensor(const KalmanSensor& k) : KalmanSensorCore(k)
+{
+  R = k.R.clone();
+  R_tmp = k.R_tmp.clone();
+  P_tmp = k.P_tmp.clone();
+}
+
+KalmanSensor::KalmanSensor(int n, int _m) : KalmanSensorCore(n, _m)
+{
+  R = cv::Mat::zeros(m, m, CV_64FC1);
+  R_tmp = cv::Mat::zeros(m, m, CV_64FC1);
+  P_tmp = cv::Mat::zeros(n, n, CV_64FC1);
+}
+
+KalmanSensor::~KalmanSensor()
+{
+  R.release();
+  R_tmp.release();
+  P_tmp.release();
+}
+
+void KalmanSensor::update_K(const cv::Mat& P_pred)
+{
+  // K = P * trans(H) * inv(H*P*trans(H) + R)
+  H_trans = H.t();
+  K = P_pred * H_trans;
+  R_tmp = H * K;
+  R_tmp = R_tmp * 1 + R;
+  R_tmp = R_tmp.inv();
+  K = H_trans * R_tmp;
+  K = P_pred * K;
+}
+
+void KalmanSensor::update_P(const cv::Mat& P_pred, cv::Mat& P)
+{
+  // P = (I - K*H) * P_pred
+  P_tmp = K * H;
+  cv::setIdentity(P);
+  P = P_tmp * -1 + P;
+  P = P * P_pred;
+}
+
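+// [Editor's note -- illustrative summary, not from the upstream ALVAR
+// sources.] update_K() and update_P() above implement the textbook
+// measurement update
+//   K = P' H^T (H P' H^T + R)^-1,   x = x' + K (z - H x'),
+//   P = (I - K H) P',
+// where primes mark the predicted (a priori) quantities. A minimal cycle
+// with these classes (public members as declared in
+// ar_track_alvar/Kalman.h) would look like:
+//
+//   alvar::Kalman kalman(n);
+//   alvar::KalmanSensor sensor(n, m);
+//   // ... fill kalman.F, kalman.Q, sensor.H, sensor.R, sensor.z ...
+//   kalman.predict_update(&sensor, tick);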
+void Kalman::predict_P()
+{
+  // P_pred = F*P*trans(F) + Q
+  F_trans = F.t();
+  P_pred = P * F_trans;
+  P_pred = F * P_pred;
+  P_pred = P_pred * 1 + Q;
+}
+
+Kalman::Kalman(int _n) : KalmanCore(_n)
+{
+  prev_tick = 0;
+  Q = cv::Mat::zeros(n, n, CV_64FC1);
+  P = cv::Mat::zeros(n, n, CV_64FC1);
+  P_pred = cv::Mat::zeros(n, n, CV_64FC1);
+}
+
+Kalman::~Kalman()
+{
+  Q.release();
+  P.release();
+  P_pred.release();
+}
+
+void Kalman::update_F(unsigned long tick)
+{
+  // cvSetIdentity(F);
+}
+
+cv::Mat& Kalman::predict(unsigned long tick)
+{
+  update_F(tick);
+  predict_x(tick);
+  predict_P();
+  return x_pred;
+}
+
+cv::Mat& Kalman::predict_update(KalmanSensor* sensor, unsigned long tick)
+{
+  predict(tick);
+  sensor->update_H(x_pred);
+  sensor->update_K(P_pred);
+  sensor->update_x(x_pred, x);
+  sensor->update_P(P_pred, P);
+  prev_tick = tick;
+  return x;
+}
+
+double Kalman::seconds_since_update(unsigned long tick)
+{
+  unsigned long tick_diff = (prev_tick ? tick - prev_tick : 0);
+  return ((double)tick_diff / 1000.0);
+}
+
+void KalmanSensorEkf::update_H(const cv::Mat& x_pred)
+{
+  // By default we update the H by calculating Jacobian numerically
+  const double step = 0.000001;
+  H.setTo(cv::Scalar::all(0));
+  for (int i = 0; i < n; i++)
+  {
+    cv::Mat H_column;
+    H_column = H.col(i);
+
+    delta.setTo(cv::Scalar::all(0));
+    delta.at<double>(i, 0) = step;
+    x_plus = x_pred + delta;
+    delta.at<double>(i, 0) = -step;
+    x_minus = x_pred + delta;
+
+    h(x_plus, z_tmp1);
+    h(x_minus, z_tmp2);
+    H_column = z_tmp1 - z_tmp2;
+    H_column = H_column * (1.0 / (2 * step));
+  }
+}
+
+void KalmanSensorEkf::update_x(const cv::Mat& x_pred, cv::Mat& x)
+{
+  // x = x_pred + K * (z - h(x_pred))
+  h(x_pred, z_pred);
+  z_residual = z_pred * -1 + z;
+  x_gain = K * z_residual;
+  x = x_pred * 1 + x_gain;
+}
+
+KalmanSensorEkf::KalmanSensorEkf(const KalmanSensorEkf& k) : KalmanSensor(k)
+{
+  delta = k.delta.clone();
+  x_plus = k.x_plus.clone();
+  x_minus = k.x_minus.clone();
+  z_tmp1 = k.z_tmp1.clone();
+  z_tmp2 = k.z_tmp2.clone();
+}
+
+KalmanSensorEkf::KalmanSensorEkf(int _n, int _m) : KalmanSensor(_n, _m)
+{
+  delta = cv::Mat::zeros(n, 1, CV_64FC1);
+  x_plus = cv::Mat::zeros(n, 1, CV_64FC1);
+  x_minus = cv::Mat::zeros(n, 1, CV_64FC1);
+  z_tmp1 = cv::Mat::zeros(m, 1, CV_64FC1);
+  z_tmp2 = cv::Mat::zeros(m, 1, CV_64FC1);
+}
+
+KalmanSensorEkf::~KalmanSensorEkf()
+{
+  delta.release();
+  x_plus.release();
+  x_minus.release();
+  z_tmp1.release();
+  z_tmp2.release();
+}
+
+void KalmanEkf::update_F(unsigned long tick)
+{
+  // By default we update the F by calculating Jacobian numerically
+  // TODO
+  double dt = (tick - prev_tick) / 1000.0;
+  const double step = 0.000001;
+  F.setTo(cv::Scalar::all(0));
+  for (int i = 0; i < n; i++)
+  {
+    cv::Mat F_column;
+    F_column = F.col(i);
+
+    delta.setTo(cv::Scalar::all(0));
+    delta.at<double>(i, 0) = step;
+    x_plus = x + delta;
+    delta.at<double>(i, 0) = -step;
+    x_minus = x + delta;
+
+    f(x_plus, x_tmp1, dt);
+    f(x_minus, x_tmp2, dt);
+    F_column = x_tmp1 - x_tmp2;
+    F_column = F_column * (1.0 / (2 * step));
+  }
+}
+
+void KalmanEkf::predict_x(unsigned long tick)
+{
+  double dt = (tick - prev_tick) / 1000.0;
+  f(x, x_pred, dt);
+}
+
+KalmanEkf::KalmanEkf(int _n) : Kalman(_n)
+{
+  delta = cv::Mat::zeros(n, 1, CV_64FC1);
+  x_plus = cv::Mat::zeros(n, 1, CV_64FC1);
+  x_minus = cv::Mat::zeros(n, 1, CV_64FC1);
+  x_tmp1 = cv::Mat::zeros(n, 1, CV_64FC1);
+  x_tmp2 = cv::Mat::zeros(n, 1, CV_64FC1);
+}
+
+KalmanEkf::~KalmanEkf()
+{
+  delta.release();
+  x_plus.release();
+  x_minus.release();
+  x_tmp1.release();
+  x_tmp2.release();
+}
+
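+// [Editor's note -- illustrative, not from the upstream ALVAR sources.]
+// KalmanSensorEkf::update_H() and KalmanEkf::update_F() above build their
+// Jacobians column by column with a central difference,
+//   J(:, i) ~= (g(x + eps*e_i) - g(x - eps*e_i)) / (2 * eps),  eps = 1e-6,
+// writing each result in place through the cv::Mat column views H.col(i)
+// and F.col(i), where g stands for the user-supplied h()/f() model function.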
+void KalmanVisualize::img_matrix(const cv::Mat& mat, int top, int left)
+{
+  int roi_t = -top;
+  int roi_b = -(img.rows - mat.rows - top);
+  int roi_l = -left;
+  int roi_r = -(img.cols - mat.cols - left);
+  img.adjustROI(roi_t, roi_b, roi_l, roi_r);
+  for (int j = 0; j < mat.rows; j++)
+  {
+    for (int i = 0; i < mat.cols; i++)
+    {
+      double d = mat.at<double>(j, i);
+      if (d < 0)
+        d = -d;
+      double c1 = 0, c2 = 0, c3 = 0;
+      if (d < 0.1)
+      {
+        c1 = 0 + ((d - 0.0) / (0.1 - 0.0) * (127 - 0));
+      }
+      else if (d < 1.0)
+      {
+        c1 = 127 + ((d - 0.1) / (1.0 - 0.1) * (255 - 127));
+      }
+      else if (d < 10.0)
+      {
+        c1 = 255;
+        c2 = 0 + ((d - 1.0) / (10.0 - 1.0) * (255 - 0));
+      }
+      else if (d < 100.0)
+      {
+        c1 = 255;
+        c2 = 255;
+        c3 = 0 + ((d - 10.0) / (100.0 - 10.0) * (255 - 0));
+      }
+      else
+      {
+        c1 = 255;
+        c2 = 255;
+        c3 = 255;
+      }
+      if (d < 0)
+      {  // BRG
+        img.at<cv::Vec3b>(j, i) = cv::Vec3b(c3, c2, c1);
+      }
+      else
+      {  // BGR
+        img.at<cv::Vec3b>(j, i) = cv::Vec3b(c2, c1, c3);
+      }
+    }
+  }
+  img.adjustROI(-roi_t, -roi_b, -roi_l, -roi_r);
+}
+
+void KalmanVisualize::Init()
+{
+  n = kalman->get_n();
+  m = sensor->get_m();
+  int img_width = std::max(3 + n + 3 + n + 5 + m + 5,
+                           1 + n + 1 + n + 1 + n + 1 + m + 1 + n + 1);
+  int img_height = 1 + n + 1 + std::max(n, m + 1 + m) + 1;
+  img =
+      cv::Mat(cv::Size(img_width, img_height), CV_8UC3, cv::Scalar(64, 64, 64));
+  img_legend = cv::imread("Legend.png");
+  if (!img_legend.empty())
+  {
+    for (img_scale = 1; img_scale < 50; img_scale++)
+    {
+      if (img_scale * img_width > img_legend.cols)
+      {
+        break;
+      }
+    }
+    img_show = cv::Mat(cv::Size(img_width * img_scale,
+                                img_legend.rows + img_height * img_scale),
+                       CV_8UC3, cv::Scalar(64, 64, 64));
+    int roi_bot = img_show.rows - img_legend.rows;
+    int roi_right = img_show.cols - img_legend.cols;
+    img_show.adjustROI(0, -roi_bot, 0, -roi_right);
+    img_legend.copyTo(img_show);
+    img_show.adjustROI(0, roi_bot, 0, roi_right);
+    cv::namedWindow("KalmanVisualize");
+  }
+  else
+  {
+    img_scale = 1;
+    img_show = cv::Mat(cv::Size(img_width * img_scale, img_height * img_scale),
+                       CV_8UC3, cv::Scalar(64, 64, 64));
+    cv::namedWindow("KalmanVisualize", 0);
+  }
+}
+
+void KalmanVisualize::out_matrix(const cv::Mat& m, char* name)
+{
+  if (m.cols == 1)
+  {
+    std::cout << name << " = [";
+    for (int j = 0; j < m.rows; j++)
+    {
+      std::cout << " " << m.at<double>(j, 0);
+    }
+    std::cout << "]^T" << std::endl;
+  }
+  else if (m.rows == 1)
+  {
+    std::cout << name << " = [";
+    for (int i = 0; i < m.cols; i++)
+    {
+      std::cout << " " << m.at<double>(0, i);
+    }
+    std::cout << "]^T" << std::endl;
+  }
+  else
+  {
+    std::cout << name << " = [" << std::endl;
+    for (int j = 0; j < m.rows; j++)
+    {
+      for (int i = 0; i < m.cols; i++)
+      {
+        std::cout << " " << m.at<double>(j, i);
+      }
+      std::cout << std::endl;
+    }
+    std::cout << "]" << std::endl;
+  }
+}
+
+KalmanVisualize::KalmanVisualize(Kalman* _kalman, KalmanSensor* _sensor)
+{
+  kalman = _kalman;
+  sensor = _sensor;
+  kalman_ext = _kalman;
+  sensor_ext = _sensor;
+  Init();
+}
+
+KalmanVisualize::KalmanVisualize(KalmanCore* _kalman, KalmanSensorCore* _sensor)
+{
+  kalman = _kalman;
+  sensor = _sensor;
+  kalman_ext = NULL;
+  sensor_ext = NULL;
+  Init();
+}
+
+KalmanVisualize::~KalmanVisualize()
+{
+  img.release();
+}
+
+void KalmanVisualize::update_pre()
+{
+  img_matrix(kalman->x, 1, 1);  // 1
+  if (kalman_ext && sensor_ext)
+  {
+    int y = std::max(2 + n, 3 + m + m);
+    img_matrix(kalman_ext->P, 1, y);  // n
+  }
+}
+
+void KalmanVisualize::update_post()
+{
+  img_matrix(kalman->F, 3, 1);           // n
+  img_matrix(kalman->x_pred, 4 + n, 1);  // 1
+  img_matrix(sensor->H, 6 + n, 1);       // n
+  
+ img_matrix(sensor->z_pred, 7 + n + n, 1); // 1
+ img_matrix(sensor->z, 7 + n + n, 2 + m);
+ img_matrix(sensor->z_residual, 9 + n + n, 1); // 1
+ img_matrix(sensor->K, 11 + n + n, 1); // m
+ img_matrix(sensor->x_gain, 12 + n + n + m, 1); // 1
+ img_matrix(kalman->x, 14 + n + n + m, 1); // 1
+ if (kalman_ext && sensor_ext)
+ {
+ int y = std::max(2 + n, 3 + m + m);
+ img_matrix(kalman_ext->Q, 2 + n, y); // n
+ img_matrix(kalman_ext->P_pred, 3 + n + n, y); // n
+ img_matrix(sensor_ext->R, 4 + n + n + n, y); // m
+ img_matrix(kalman_ext->P, img.cols - 1 - n, y); // n
+ }
+ if (!img_legend.empty())
+ {
+ int roi_t = -img_legend.rows;
+ int roi_b = -img_show.rows + img.rows * img_scale + img_legend.rows;
+ int roi_l = 0;
+ int roi_r = -img_show.cols + img.cols * img_scale;
+ img_show.adjustROI(roi_t, roi_b, roi_l, roi_r);
+ cv::resize(img, img_show, img_show.size(), 0, 0, cv::INTER_NEAREST);
+ img_show.adjustROI(-roi_t, -roi_b, -roi_l, -roi_r);
+ }
+ else
+ {
+ cv::resize(img, img_show, img_show.size(), 0, 0, cv::INTER_NEAREST);
+ }
+}
+
+void KalmanVisualize::show()
+{
+ // out_matrix(sensor->K, "K");
+ cv::imshow("KalmanVisualize", img_show);
+}
+
+} // namespace alvar
diff --git a/ar_track_alvar/src/Line.cpp b/ar_track_alvar/src/Line.cpp
index e5aa15e..6697ed6 100644
--- a/ar_track_alvar/src/Line.cpp
+++ b/ar_track_alvar/src/Line.cpp
@@ -22,103 +22,99 @@
 */
 #include "ar_track_alvar/Line.h"
-#include
+#include
 using namespace std;
-namespace alvar {
+namespace alvar
+{
 using namespace std;
-Line::Line(cv::Vec4f params)
+Line::Line(const cv::Vec4f& params)
 {
- c.x = params[2];
- c.y = params[3];
- s.x = params[0];
- s.y = params[1];
+ c.x = params[2];
+ c.y = params[3];
+ s.x = params[0];
+ s.y = params[1];
 }
 void FitLines(vector<Line>& lines)
 {
-
 }
-int FitLines(vector<Line> &lines,
- const vector<int>& corners,
- const vector<PointDouble>& edge,
- cv::Mat *grey /*=0*/) // grey image for future sub pixel accuracy
+int FitLines(vector<Line>& lines, const vector<int>& corners,
+ const vector<PointDouble>& edge)
 {
- lines.clear();
- for(unsigned j = 0; j < corners.size(); ++j)
- {
- int start, end, first;
- int size = (int)edge.size();
-
- first = corners[0];
- start = corners[j];
-
- if(j < corners.size()-1)
- end = corners[j+1];
- else
- end = first;
-
- int len = 0;
-
- if(start < end)
- len = end-start+1;
- else
- len = size-start+end+1;
-
- int ind;
- double* data = new double[2*len];
-
- // OpenCV routine...
- cv::Mat line_data = cv::Mat(1, len, CV_32FC2);
- for(int i = 0; i < len; ++i)
- {
- ind = i + start;
- if(ind >= size)
- ind = ind-size;
-
- double px = double(edge[ind].x);
- double py = double(edge[ind].y);
- line_data.at<cv::Point2f>(0,i) = cv::Point2f(px, py);
- }
-
- //float params[4] = {0};
- cv::Vec4f params;
- cv::fitLine(line_data, params, CV_DIFF_L2, 0, 0.01, 0.01);
- lines.push_back(Line(params));
-
- delete [] data;
- //cvReleaseMat(&line_data);
-
- }
-
- return lines.size();
+ lines.clear();
+ for (unsigned j = 0; j < corners.size(); ++j)
+ {
+ int start, end, first;
+ int size = (int)edge.size();
+
+ first = corners[0];
+ start = corners[j];
+
+ if (j < corners.size() - 1)
+ end = corners[j + 1];
+ else
+ end = first;
+
+ int len = 0;
+
+ if (start < end)
+ len = end - start + 1;
+ else
+ len = size - start + end + 1;
+
+ int ind;
+ double* data = new double[2 * len];
+
+ // OpenCV routine...
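// Illustrative aside, not part of this patch: a minimal sketch of the
// modern cv::fitLine call the ported FitLines makes just below, run on
// made-up points. The output Vec4f is (vx, vy, x0, y0), a unit direction
// and a point on the line, which is exactly what Line(params) unpacks
// into its s and c members.
#include <opencv2/imgproc.hpp>
#include <iostream>
#include <vector>

int main()
{
  std::vector<cv::Point2f> pts = { { 0.f, 0.f }, { 1.f, 1.02f }, { 2.f, 1.98f } };
  cv::Vec4f line;
  cv::fitLine(pts, line, cv::DIST_L2, 0, 0.01, 0.01);  // least-squares fit
  std::cout << "dir=(" << line[0] << "," << line[1] << ") point=("
            << line[2] << "," << line[3] << ")" << std::endl;
  return 0;
}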
+ cv::Mat line_data = cv::Mat(1, len, CV_32FC2);
+ for (int i = 0; i < len; ++i)
+ {
+ ind = i + start;
+ if (ind >= size)
+ ind = ind - size;
+
+ double px = double(edge[ind].x);
+ double py = double(edge[ind].y);
+ line_data.at<cv::Point2f>(0, i) = cv::Point2f(px, py);
+ }
+
+ cv::Vec4f line;
+ cv::fitLine(line_data, line, cv::DIST_L2, 0, 0.01, 0.01);
+ lines.push_back(Line(line));
+
+ delete[] data;
+ line_data.release();
+ }
+
+ return lines.size();
 }
 PointDouble Intersection(const Line& l1, const Line& l2)
 {
-
- double vx = l1.s.x;
- double vy = l1.s.y;
- double ux = l2.s.x;
- double uy = l2.s.y;
- double wx = l2.c.x-l1.c.x;
- double wy = l2.c.y-l1.c.y;
-
- double s, px, py;
- double tmp = vx*uy-vy*ux;
- if(tmp==0) tmp = 1;
-
- //if(/*tmp <= 1.f && tmp >= -1.f && */tmp != 0.f && ang > 0.1)
- {
- s = (vy*wx-vx*wy) / (tmp);
- px = l2.c.x+s*ux;
- py = l2.c.y+s*uy;
- }
-
- return PointDouble(px, py);
+ double vx = l1.s.x;
+ double vy = l1.s.y;
+ double ux = l2.s.x;
+ double uy = l2.s.y;
+ double wx = l2.c.x - l1.c.x;
+ double wy = l2.c.y - l1.c.y;
+
+ double s, px, py;
+ double tmp = vx * uy - vy * ux;
+ if (tmp == 0)
+ tmp = 1;
+
+ // if(/*tmp <= 1.f && tmp >= -1.f && */tmp != 0.f && ang > 0.1)
+ {
+ s = (vy * wx - vx * wy) / (tmp);
+ px = l2.c.x + s * ux;
+ py = l2.c.y + s * uy;
+ }
+
+ return PointDouble(px, py);
 }
-} // namespace alvar
+} // namespace alvar
diff --git a/ar_track_alvar/src/Marker.cpp b/ar_track_alvar/src/Marker.cpp
index 62bef81..071b726 100644
--- a/ar_track_alvar/src/Marker.cpp
+++ b/ar_track_alvar/src/Marker.cpp
@@ -23,10 +23,7 @@
 #include "ar_track_alvar/Alvar.h"
 #include "ar_track_alvar/Marker.h"
-#include
-#include "opencv2/core/types_c.h"
-#include
-#include
+#include
 template class ALVAR_EXPORT alvar::MarkerIteratorImpl<alvar::Marker>;
 template class ALVAR_EXPORT alvar::MarkerIteratorImpl<alvar::MarkerData>;
 template class ALVAR_EXPORT alvar::MarkerIteratorImpl<alvar::MarkerArtoolkit>;
 using namespace std;
-namespace alvar {
+namespace alvar
+{
 using namespace std;
 #define HEADER_SIZE 8
-void Marker::VisualizeMarkerPose(cv::Mat *image, Camera *cam, double visualize2d_points[12][2], cv::Scalar color) const {
- // Cube
- for (int i=0; i<4; i++) {
- cv::line(*image, cvPoint((int)visualize2d_points[i][0], (int)visualize2d_points[i][1]), cvPoint((int)visualize2d_points[(i+1)%4][0], (int)visualize2d_points[(i+1)%4][1]), color);
- cv::line(*image, cvPoint((int)visualize2d_points[i][0], (int)visualize2d_points[i][1]), cvPoint((int)visualize2d_points[4+i][0], (int)visualize2d_points[4+i][1]), color);
- cv::line(*image, cvPoint((int)visualize2d_points[4+i][0], (int)visualize2d_points[4+i][1]), cvPoint((int)visualize2d_points[4+((i+1)%4)][0], (int)visualize2d_points[4+((i+1)%4)][1]), color);
- }
- // Coordinates
- cv::line(*image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[9][0], (int)visualize2d_points[9][1]), cv::Scalar(255,0,0));
- cv::line(*image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[10][0], (int)visualize2d_points[10][1]), cv::Scalar(0,255,0));
- cv::line(*image, cvPoint((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), cvPoint((int)visualize2d_points[11][0], (int)visualize2d_points[11][1]), cv::Scalar(0,0,255));
+void Marker::VisualizeMarkerPose(cv::Mat& image, Camera* cam,
+ double visualize2d_points[12][2],
+ const cv::Scalar& color) const
+{
+ // Cube
+ for (int i = 0; i < 4; i++)
+ {
+ cv::line(
+ image,
+ cv::Point((int)visualize2d_points[i][0],
(int)visualize2d_points[i][1]), + cv::Point((int)visualize2d_points[(i + 1) % 4][0], + (int)visualize2d_points[(i + 1) % 4][1]), + color); + cv::line( + image, + cv::Point((int)visualize2d_points[i][0], (int)visualize2d_points[i][1]), + cv::Point((int)visualize2d_points[4 + i][0], + (int)visualize2d_points[4 + i][1]), + color); + cv::line(image, + cv::Point((int)visualize2d_points[4 + i][0], + (int)visualize2d_points[4 + i][1]), + cv::Point((int)visualize2d_points[4 + ((i + 1) % 4)][0], + (int)visualize2d_points[4 + ((i + 1) % 4)][1]), + color); + } + // Coordinates + cv::line( + image, + cv::Point((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), + cv::Point((int)visualize2d_points[9][0], (int)visualize2d_points[9][1]), + CV_RGB(255, 0, 0)); + cv::line( + image, + cv::Point((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), + cv::Point((int)visualize2d_points[10][0], (int)visualize2d_points[10][1]), + CV_RGB(0, 255, 0)); + cv::line( + image, + cv::Point((int)visualize2d_points[8][0], (int)visualize2d_points[8][1]), + cv::Point((int)visualize2d_points[11][0], (int)visualize2d_points[11][1]), + CV_RGB(0, 0, 255)); } -void Marker::VisualizeMarkerContent(cv::Mat *image, Camera *cam, double datatext_point[2], double content_point[2]) const { +void Marker::VisualizeMarkerContent(cv::Mat& image, Camera* cam, + double datatext_point[2], + double content_point[2]) const +{ #ifdef VISUALIZE_MARKER_POINTS - for (size_t i=0; i= 0) && (x < image->rows) && - (y >= 0) && (y < image->cols)) - { - if (cvGet2D(marker_content, j/3, i/3).val[0]) { - cvSet2D(image, y, x, cvScalar(255, 255, 255, 0)); - } else { - cvSet2D(image, y, x, cvScalar(0, 0, 0, 0)); - } - } - } - } + // Marker data + std::stringstream val; + val << int(GetId()); + cv::putText(image, val.str(), + cv::Point((int)datatext_point[0], (int)datatext_point[1]), 0, 0.5, + CV_RGB(255, 255, 0)); + + // MarkerContent + int xc = int(content_point[0]); + int yc = int(content_point[1]); + for (int j = 0; j < res * 3; j++) + { + for (int i = 0; i < res * 3; i++) + { + int x = xc + i; + int y = yc + j; + if ((x >= 0) && (x < image.cols) && (y >= 0) && (y < image.rows)) + { + if (marker_content.at(j / 3, i / 3)) + { + image.at(y, x) = cv::Vec3b(255, 255, 255); + } + else + { + image.at(y, x) = cv::Vec3b(0, 0, 0); + } + } + } + } } -void Marker::VisualizeMarkerError(cv::Mat *image, Camera *cam, double errortext_point[2]) const { - // CvFont font; - // cvInitFont(cv::FONT_HERSHEY_SIMPLEX, 0, 0.5, 0.5, 0); - std::stringstream val; - if (GetError(MARGIN_ERROR|DECODE_ERROR) > 0) { - val.str(""); - val< 0.01) { - val.str(""); - val< 0) + { + val.str(""); + val << int(GetError(MARGIN_ERROR) * 100) << "% "; + val << int(GetError(DECODE_ERROR) * 100) << "% "; + cv::putText(image, val.str(), + cv::Point((int)errortext_point[0], (int)errortext_point[1]), 0, + 0.5, CV_RGB(255, 0, 0)); + } + else if (GetError(TRACK_ERROR) > 0.01) + { + val.str(""); + val << int(GetError(TRACK_ERROR) * 100) << "%"; + cv::putText(image, val.str(), + cv::Point((int)errortext_point[0], (int)errortext_point[1]), 0, + 0.5, CV_RGB(128, 0, 0)); + } } -void MarkerData::VisualizeMarkerContent(cv::Mat *image, Camera *cam, double datatext_point[2], double content_point[2]) const { +void MarkerData::VisualizeMarkerContent(cv::Mat& image, Camera* cam, + double datatext_point[2], + double content_point[2]) const +{ #ifdef VISUALIZE_MARKER_POINTS - for (size_t i=0; iProjectPoints(&visualize3d_points_mat, &pose, &visualize2d_points_mat); - - VisualizeMarkerPose(image, cam, 
visualize2d_points, color); - VisualizeMarkerContent(image, cam, visualize2d_points[0], visualize2d_points[8]); - VisualizeMarkerError(image, cam, visualize2d_points[2]); +void Marker::Visualize(cv::Mat& image, Camera* cam, + const cv::Scalar& color) const +{ + double visualize3d_points[12][3] = { + // cube + { -(edge_length / 2), -(edge_length / 2), 0 }, + { -(edge_length / 2), (edge_length / 2), 0 }, + { (edge_length / 2), (edge_length / 2), 0 }, + { (edge_length / 2), -(edge_length / 2), 0 }, + { -(edge_length / 2), -(edge_length / 2), edge_length }, + { -(edge_length / 2), (edge_length / 2), edge_length }, + { (edge_length / 2), (edge_length / 2), edge_length }, + { (edge_length / 2), -(edge_length / 2), edge_length }, + // coordinates + { 0, 0, 0 }, + { edge_length, 0, 0 }, + { 0, edge_length, 0 }, + { 0, 0, edge_length }, + }; + double visualize2d_points[12][2]; + cv::Mat visualize3d_points_mat; + cv::Mat visualize2d_points_mat; + visualize3d_points_mat = cv::Mat(12, 3, CV_64F, visualize3d_points); + visualize2d_points_mat = cv::Mat(12, 2, CV_64F, visualize2d_points); + cam->ProjectPoints(visualize3d_points_mat, &pose, visualize2d_points_mat); + + VisualizeMarkerPose(image, cam, visualize2d_points, color); + VisualizeMarkerContent(image, cam, visualize2d_points[0], + visualize2d_points[8]); + VisualizeMarkerError(image, cam, visualize2d_points[2]); } -void Marker::CompareCorners(std::vector &_marker_corners_img, int *orientation, double *error) { - vector::iterator corners_new = _marker_corners_img.begin(); - vector::const_iterator corners_old = marker_corners_img.begin(); - vector errors(4); - for (int i=0; i<4; i++) { - errors[0] += PointSquaredDistance(marker_corners_img[i], _marker_corners_img[i]); - errors[1] += PointSquaredDistance(marker_corners_img[i], _marker_corners_img[(i+1)%4]); - errors[2] += PointSquaredDistance(marker_corners_img[i], _marker_corners_img[(i+2)%4]); - errors[3] += PointSquaredDistance(marker_corners_img[i], _marker_corners_img[(i+3)%4]); - } - *orientation = min_element(errors.begin(), errors.end()) - errors.begin(); - *error = sqrt(errors[*orientation]/4); - *error /= sqrt(max(PointSquaredDistance(marker_corners_img[0], marker_corners_img[2]), PointSquaredDistance(marker_corners_img[1], marker_corners_img[3]))); +void Marker::CompareCorners(vector& _marker_corners_img, + int* orientation, double* error) +{ + vector::iterator corners_new = _marker_corners_img.begin(); + vector::const_iterator corners_old = marker_corners_img.begin(); + vector errors(4); + for (int i = 0; i < 4; i++) + { + errors[0] += + PointSquaredDistance(marker_corners_img[i], _marker_corners_img[i]); + errors[1] += PointSquaredDistance(marker_corners_img[i], + _marker_corners_img[(i + 1) % 4]); + errors[2] += PointSquaredDistance(marker_corners_img[i], + _marker_corners_img[(i + 2) % 4]); + errors[3] += PointSquaredDistance(marker_corners_img[i], + _marker_corners_img[(i + 3) % 4]); + } + *orientation = min_element(errors.begin(), errors.end()) - errors.begin(); + *error = sqrt(errors[*orientation] / 4); + *error /= sqrt( + max(PointSquaredDistance(marker_corners_img[0], marker_corners_img[2]), + PointSquaredDistance(marker_corners_img[1], marker_corners_img[3]))); } -void Marker::CompareContent(std::vector &_marker_corners_img, cv::Mat *gray, Camera *cam, int *orientation) const { - // TODO: Note, that to use this method you need to straighten the content - // TODO: This method can be used with image based trackingt - +void Marker::CompareContent(vector& _marker_corners_img, + 
cv::Mat& gray, Camera* cam, int* orientation) const +{ + // TODO: Note, that to use this method you need to straighten the content + // TODO: This method can be used with image based trackingt } -bool Marker::UpdateContent(std::vector &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no /*= 0*/) { - return UpdateContentBasic(_marker_corners_img, gray, cam, frame_no); +bool Marker::UpdateContent(vector& _marker_corners_img, + cv::Mat& gray, Camera* cam, int frame_no /*= 0*/) +{ + return UpdateContentBasic(_marker_corners_img, gray, cam, frame_no); } -bool Marker::UpdateContentBasic(std::vector &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no /*= 0*/) { - vector marker_corners_img_undist; - marker_corners_img_undist.resize(_marker_corners_img.size()); - copy(_marker_corners_img.begin(), _marker_corners_img.end(), marker_corners_img_undist.begin()); - - // Figure out the marker point position in the image - Homography H; - vector marker_points_img(marker_points.size()); - marker_points_img.resize(marker_points.size()); - cam->Undistort(marker_corners_img_undist); - H.Find(marker_corners, marker_corners_img_undist); - H.ProjectPoints(marker_points, marker_points_img); - cam->Distort(marker_points_img); - - ros_marker_points_img.clear(); - - // Read the content - int x, y; - double min = 255.0, max = 0.0; - for (int j=0; jheight; j++) { - for (int i=0; iwidth; i++) { - x = (int)(0.5+Limit(marker_points_img[(j*marker_content->width)+i].x, 1, gray->rows-2)); - y = (int)(0.5+Limit(marker_points_img[(j*marker_content->width)+i].y, 1, gray->cols-2)); - - marker_points_img[(j*marker_content->width)+i].val = (int)cvGetReal2D(gray, y, x); - - ros_marker_points_img.push_back(PointDouble(x,y)); - - /* - // Use median of 5 neighbor pixels - vector vals; - vals.clear(); - vals.push_back(); - vals.push_back((int)cvGetReal2D(gray, y-1, x)); - vals.push_back((int)cvGetReal2D(gray, y, x-1)); - vals.push_back((int)cvGetReal2D(gray, y+1, x)); - vals.push_back((int)cvGetReal2D(gray, y, x+1)); - nth_element(vals.begin(), vals.begin()+2, vals.end()); - tmp = vals[2]; - */ - - cvSet2D(marker_content, j, i, cvScalar(marker_points_img[(j*marker_content->width)+i].val)); - if(marker_points_img[(j*marker_content->width)+i].val > max) max = marker_points_img[(j*marker_content->width)+i].val; - if(marker_points_img[(j*marker_content->width)+i].val < min) min = marker_points_img[(j*marker_content->width)+i].val; - } - } - - // Take few additional points from border and just - // outside the border to make the right thresholding - vector marker_margin_w_img(marker_margin_w.size()); - vector marker_margin_b_img(marker_margin_b.size()); - H.ProjectPoints(marker_margin_w, marker_margin_w_img); - H.ProjectPoints(marker_margin_b, marker_margin_b_img); - cam->Distort(marker_margin_w_img); - cam->Distort(marker_margin_b_img); - - min = max = 0; // Now min and max values are averages over black and white border pixels. 
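// Illustrative aside, not part of this patch: the idea behind
// Marker::CompareCorners above, shown on made-up corner data. All four
// cyclic shifts of the new corners are tried against the stored ones, and
// the shift with the smallest summed squared distance wins.
#include <opencv2/core.hpp>
#include <algorithm>
#include <array>
#include <iostream>

int main()
{
  std::array<cv::Point2d, 4> oldc = { { { 0, 0 }, { 1, 0 }, { 1, 1 }, { 0, 1 } } };
  std::array<cv::Point2d, 4> newc = { { { 1, 0 }, { 1, 1 }, { 0, 1 }, { 0, 0 } } };
  std::array<double, 4> err = { 0, 0, 0, 0 };
  for (int shift = 0; shift < 4; shift++)
    for (int i = 0; i < 4; i++)
    {
      cv::Point2d d = oldc[i] - newc[(i + shift) % 4];
      err[shift] += d.dot(d);  // squared distance, as PointSquaredDistance
    }
  int orientation = std::min_element(err.begin(), err.end()) - err.begin();
  std::cout << "orientation=" << orientation << std::endl;  // prints 3
  return 0;
}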
- for (size_t i=0; irows-1)); - y = (int)(0.5+Limit(marker_margin_w_img[i].y, 0, gray->cols-1)); - marker_margin_w_img[i].val = (int)cvGetReal2D(gray, y, x); - max += marker_margin_w_img[i].val; - //if(marker_margin_w_img[i].val > max) max = marker_margin_w_img[i].val; - //if(marker_margin_w_img[i].val < min) min = marker_margin_w_img[i].val; - } - for (size_t i=0; irows-1)); - y = (int)(0.5+Limit(marker_margin_b_img[i].y, 0, gray->cols-1)); - marker_margin_b_img[i].val = (int)cvGetReal2D(gray, y, x); - min += marker_margin_b_img[i].val; - //if(marker_margin_b_img[i].val > max) max = marker_margin_b_img[i].val; - //if(marker_margin_b_img[i].val < min) min = marker_margin_b_img[i].val; - ros_marker_points_img.push_back(PointDouble(x,y)); - } - max /= marker_margin_w_img.size(); - min /= marker_margin_b_img.size(); - - // Threshold the marker content - - std::cout << "potential problem 1" << std::endl; - cv::Mat temp = cv::cvarrToMat(marker_content); - - cv::threshold(temp, temp, (max+min)/2.0, 255, cv::THRESH_BINARY); - - CvMat temp2 = cvMat(temp); - marker_content = &temp2; - - // Count erroneous margin nodes - int erroneous = 0; - int total = 0; - for (size_t i=0; i (max+min)/2.0) erroneous++; - total++; - } - margin_error = (double)erroneous/total; - track_error; +bool Marker::UpdateContentBasic(vector& _marker_corners_img, + cv::Mat& gray, Camera* cam, + int frame_no /*= 0*/) +{ + vector marker_corners_img_undist(_marker_corners_img); + + // Figure out the marker point position in the image + Homography H; + vector marker_points_img(marker_points.size()); + marker_points_img.resize(marker_points.size()); + cam->Undistort(marker_corners_img_undist); + H.Find(marker_corners, marker_corners_img_undist); + H.ProjectPoints(marker_points, marker_points_img); + cam->Distort(marker_points_img); + + ros_marker_points_img.clear(); + + // Read the content + int x, y; + double min = 255.0, max = 0.0; + for (int j = 0; j < marker_content.rows; j++) + { + for (int i = 0; i < marker_content.cols; i++) + { + x = (int)(0.5 + Limit(marker_points_img[(j * marker_content.cols) + i].x, + 1, gray.cols - 2)); + y = (int)(0.5 + Limit(marker_points_img[(j * marker_content.cols) + i].y, + 1, gray.rows - 2)); + + marker_points_img[(j * marker_content.cols) + i].val = + (int)gray.at(y, x); + + ros_marker_points_img.push_back(PointDouble(x, y)); + + /* + // Use median of 5 neighbor pixels + vector vals; + vals.clear(); + vals.push_back(); + vals.push_back((int)cvGetReal2D(gray, y-1, x)); + vals.push_back((int)cvGetReal2D(gray, y, x-1)); + vals.push_back((int)cvGetReal2D(gray, y+1, x)); + vals.push_back((int)cvGetReal2D(gray, y, x+1)); + nth_element(vals.begin(), vals.begin()+2, vals.end()); + tmp = vals[2]; + */ + + marker_content.at(j, i) = + marker_points_img[(j * marker_content.cols) + i].val; + if (marker_points_img[(j * marker_content.cols) + i].val > max) + max = marker_points_img[(j * marker_content.cols) + i].val; + if (marker_points_img[(j * marker_content.cols) + i].val < min) + min = marker_points_img[(j * marker_content.cols) + i].val; + } + } + + // Take few additional points from border and just + // outside the border to make the right thresholding + vector marker_margin_w_img(marker_margin_w.size()); + vector marker_margin_b_img(marker_margin_b.size()); + H.ProjectPoints(marker_margin_w, marker_margin_w_img); + H.ProjectPoints(marker_margin_b, marker_margin_b_img); + cam->Distort(marker_margin_w_img); + cam->Distort(marker_margin_b_img); + + min = max = 0; // Now min and max values are averages over 
black and white + // border pixels. + for (size_t i = 0; i < marker_margin_w_img.size(); i++) + { + x = (int)(0.5 + Limit(marker_margin_w_img[i].x, 0, gray.cols - 1)); + y = (int)(0.5 + Limit(marker_margin_w_img[i].y, 0, gray.rows - 1)); + marker_margin_w_img[i].val = (int)gray.at(y, x); + max += marker_margin_w_img[i].val; + // if(marker_margin_w_img[i].val > max) max = marker_margin_w_img[i].val; + // if(marker_margin_w_img[i].val < min) min = marker_margin_w_img[i].val; + } + for (size_t i = 0; i < marker_margin_b_img.size(); i++) + { + x = (int)(0.5 + Limit(marker_margin_b_img[i].x, 0, gray.cols - 1)); + y = (int)(0.5 + Limit(marker_margin_b_img[i].y, 0, gray.rows - 1)); + marker_margin_b_img[i].val = (int)gray.at(y, x); + min += marker_margin_b_img[i].val; + // if(marker_margin_b_img[i].val > max) max = marker_margin_b_img[i].val; + // if(marker_margin_b_img[i].val < min) min = marker_margin_b_img[i].val; + ros_marker_points_img.push_back(PointDouble(x, y)); + } + max /= marker_margin_w_img.size(); + min /= marker_margin_b_img.size(); + + // Threshold the marker content + cv::threshold(marker_content, marker_content, (max + min) / 2.0, 255, + cv::THRESH_BINARY); + + // Count erroneous margin nodes + int erroneous = 0; + int total = 0; + for (size_t i = 0; i < marker_margin_w_img.size(); i++) + { + if (marker_margin_w_img[i].val < (max + min) / 2.0) + erroneous++; + total++; + } + for (size_t i = 0; i < marker_margin_b_img.size(); i++) + { + if (marker_margin_b_img[i].val > (max + min) / 2.0) + erroneous++; + total++; + } + margin_error = (double)erroneous / total; + track_error; #ifdef VISUALIZE_MARKER_POINTS - // Now we fill also this temporary debug table for visualizing marker code reading - // TODO: this whole vector is only for debug purposes - marker_allpoints_img.clear(); - for (size_t i=0; i (max+min)/2.0) p.val=255; // error - else p.val=0; // ok - marker_allpoints_img.push_back(p); - } - for (size_t i=0; i (max + min) / 2.0) + p.val = 255; // error + else + p.val = 0; // ok + marker_allpoints_img.push_back(p); + } + for (size_t i = 0; i < marker_points_img.size(); i++) + { + PointDouble p = marker_points_img[i]; + p.val = 128; // Unknown? 
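// Illustrative aside, not part of this patch: the thresholding step of
// UpdateContentBasic above in isolation, with made-up numbers. The cut
// point is the midpoint between the average white-margin and black-margin
// samples, applied with cv::THRESH_BINARY.
#include <opencv2/imgproc.hpp>
#include <iostream>

int main()
{
  cv::Mat content = (cv::Mat_<uchar>(2, 2) << 40, 200, 90, 160);
  double max = 210.0, min = 30.0;  // pretend margin averages
  cv::threshold(content, content, (max + min) / 2.0, 255, cv::THRESH_BINARY);
  std::cout << content << std::endl;  // cells above 120 become 255, rest 0
  return 0;
}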
+ marker_allpoints_img.push_back(p); + } #endif - return true; + return true; } -void Marker::UpdatePose(std::vector &_marker_corners_img, Camera *cam, int orientation, int frame_no /* =0 */, bool update_pose /* =true */) { - marker_corners_img.resize(_marker_corners_img.size()); - copy(_marker_corners_img.begin(), _marker_corners_img.end(), marker_corners_img.begin()); +void Marker::UpdatePose(vector& _marker_corners_img, Camera* cam, + int orientation, int frame_no /* =0 */, + bool update_pose /* =true */) +{ + marker_corners_img = _marker_corners_img; - // Calculate exterior orientation - if(orientation > 0) - std::rotate(marker_corners_img.begin(), marker_corners_img.begin() + orientation, marker_corners_img.end()); + // Calculate exterior orientation + if (orientation > 0) + std::rotate(marker_corners_img.begin(), + marker_corners_img.begin() + orientation, + marker_corners_img.end()); - if (update_pose) cam->CalcExteriorOrientation(marker_corners, marker_corners_img, &pose); + if (update_pose) + cam->CalcExteriorOrientation(marker_corners, marker_corners_img, &pose); } -bool Marker::DecodeContent(int *orientation) { - *orientation = 0; - decode_error = 0; - return true; +bool Marker::DecodeContent(int* orientation) +{ + *orientation = 0; + decode_error = 0; + return true; } -void Marker::SaveMarkerImage(const char *filename, int save_res) const { - double scale; - if (save_res == 0) { - // TODO: More intelligent deduction of a minimum save_res - save_res = int((res+margin+margin)*12); - } - scale = double(save_res)/double(res+margin+margin); - - - cv::Mat temp = cv::Mat(cv::Size(save_res, save_res),CV_8UC1); - cv::Mat *img = &temp; - - cv::Mat temp2 = cv::Mat(cv::Size(int(res*scale+0.5), int(res*scale+0.5)),CV_8UC1); - cv::Mat *img_content = &temp2; - - // cvCreateImage(cvSize(int(res*scale+0.5), int(res*scale+0.5)), IPL_DEPTH_8U, 1); - - - cvZero(img); - CvMat submat; - cvGetSubRect(img, &submat, cvRect(int(margin*scale), int(margin*scale), int(res*scale), int(res*scale))); - std::cout << "potential problem 2" << std::endl; - cv::resize(cv::cvarrToMat(marker_content), *img_content,img_content->size(), cv::INTER_NEAREST); - cvCopy(img_content, &submat); - cv::imwrite(filename, *img); - // cvReleaseImage(&img_content); - // cvReleaseImage(&img); +void Marker::SaveMarkerImage(const char* filename, int save_res) const +{ + double scale; + if (save_res == 0) + { + // TODO: More intelligent deduction of a minimum save_res + save_res = int((res + margin + margin) * 12); + } + scale = double(save_res) / double(res + margin + margin); + + cv::Mat img = cv::Mat::zeros(save_res, save_res, CV_8UC1); + cv::Mat img_content = cv::Mat( + cv::Size(int(res * scale + 0.5), int(res * scale + 0.5)), CV_8UC1); + cv::Mat submat = img(cv::Rect(int(margin * scale), int(margin * scale), + int(res * scale), int(res * scale))); + cv::resize(marker_content, img_content, img_content.size(), 0, 0, + cv::INTER_NEAREST); + img_content.copyTo(submat); + cv::imwrite(filename, img); + img_content.release(); + img.release(); } -void Marker::ScaleMarkerToImage(cv::Mat *image) const { - const int multiplier=96; - cv::Mat temp = cv::Mat(cv::Size(int(multiplier*(res+margin+margin)+0.5), int(multiplier*(res+margin+margin)+0.5)),CV_8UC1); - cv::Mat *img = &temp; - - cv::Mat temp2 = cv::Mat(cv::Size(int(multiplier*res+0.5), int(multiplier*res+0.5)),CV_8UC1); - cv::Mat *img_content = &temp2; - - - - img->setTo(cv::Scalar::all(0)); - CvMat submat; - CvMat temp3 = cvMat(*img); - cvGetSubRect(&temp3, &submat, 
cvRect(int(multiplier*margin+0.5), int(multiplier*margin+0.5), int(multiplier*res+0.5), int(multiplier*res+0.5))); - cv::resize(cv::cvarrToMat(marker_content), *img_content, img_content->size(), cv::INTER_NEAREST); - - CvMat temp4 = cvMat(*img_content); - cvCopy(&temp4, &submat); - - cv::resize(*img, *image,image->size(), cv::INTER_NEAREST); - // cvReleaseImage(&img_content); - // cvReleaseImage(&img); +void Marker::ScaleMarkerToImage(cv::Mat& image) const +{ + const int multiplier = 96; + cv::Mat img = + cv::Mat::zeros(cv::Size(int(multiplier * (res + margin + margin) + 0.5), + int(multiplier * (res + margin + margin) + 0.5)), + CV_8UC1); + cv::Mat img_content = cv::Mat( + cv::Size(int(multiplier * res + 0.5), int(multiplier * res + 0.5)), + CV_8UC1); + cv::Mat submat = img( + cv::Rect(int(multiplier * margin + 0.5), int(multiplier * margin + 0.5), + int(multiplier * res + 0.5), int(multiplier * res + 0.5))); + cv::resize(marker_content, img_content, img_content.size(), 0, 0, + cv::INTER_NEAREST); + img_content.copyTo(submat); + cv::resize(img, image, image.size(), 0, 0, cv::INTER_NEAREST); + img_content.release(); + img.release(); } -void Marker::SetMarkerSize(double _edge_length, int _res, double _margin) { - // TODO: Is this right place for default marker size? - edge_length = (_edge_length?_edge_length:1); - res = _res; //(_res?_res:10); - margin = (_margin?_margin:1); - double x_min = -0.5*edge_length; - double y_min = -0.5*edge_length; - double x_max = 0.5*edge_length; - double y_max = 0.5*edge_length; - double cx_min = (x_min * res)/(res + margin + margin); - double cy_min = (y_min * res)/(res + margin + margin); - double cx_max = (x_max * res)/(res + margin + margin); - double cy_max = (y_max * res)/(res + margin + margin); - double step = edge_length / (res + margin + margin); - - // marker_corners - marker_corners_img.resize(4); - - // Same order as the detected corners - marker_corners.clear(); - marker_corners.push_back(PointDouble(x_min, y_min)); - marker_corners.push_back(PointDouble(x_max, y_min)); - marker_corners.push_back(PointDouble(x_max, y_max)); - marker_corners.push_back(PointDouble(x_min, y_max)); - - // Rest can be done only if we have existing resolution - if (res <= 0) return; - - // marker_points - marker_points.clear(); - for(int j = 0; j < res; ++j) { - for(int i = 0; i < res; ++i) { - PointDouble pt; - pt.y = cy_max - (step*j) - (step/2); - pt.x = cx_min + (step*i) + (step/2); - marker_points.push_back(pt); - } - } - - // Samples to be used in margins - // TODO: Now this works only if the "margin" is without decimals - // TODO: This should be made a lot cleaner - marker_margin_w.clear(); - marker_margin_b.clear(); - for(int j = -1; j<=margin-1; j++) { - PointDouble pt; - // Sides - for (int i=0; i x_max) || (pt.y > y_max)) - { - marker_margin_w.push_back(pt); - } - else - if ((pt.x < cx_min) || (pt.y < cy_min) || - (pt.x > cx_max) || (pt.y > cy_max)) - { - marker_margin_b.push_back(pt); - } - } - } - /* - //double step = edge_length / (res + margin + margin); - for(int j = 0; j < res+margin+margin+2; ++j) { - for(int i = 0; i < res+margin+margin+2; ++i) { - PointDouble pt; - pt.y = y_min - step/2 + step*j; - pt.x = x_min - step/2 + step*i; - if ((pt.x < x_min) || (pt.y < y_min) || - (pt.x > x_max) || (pt.y > y_max)) - { - marker_margin_w.push_back(pt); - } - else - if ((pt.x < cx_min) || (pt.y < cy_min) || - (pt.x > cx_max) || (pt.y > cy_max)) - { - marker_margin_b.push_back(pt); - } - } - } - */ - /* - marker_margin_w.clear(); - marker_margin_b.clear(); - 
for (double y=y_min-(step/2); y x_max) || (y > y_max)) - { - marker_margin_w.push_back(pt); - } - else - if ((x < cx_min) || (y < cy_min) || - (x > cx_max) || (y > cy_max)) - { - marker_margin_b.push_back(pt); - } - } - } - */ - /* - marker_points.clear(); - marker_margin_w.clear(); - marker_margin_b.clear(); - for(int j = 0; j < res+margin+margin+2; ++j) { - for(int i = 0; i < res+margin+margin+2; ++i) { - PointDouble pt; - } - } - */ - - // marker content - if (marker_content) cvReleaseMat(&marker_content); - marker_content = cvCreateMat(res, res, CV_8U); - cvSet(marker_content, cvScalar(255)); +void Marker::SetMarkerSize(double _edge_length, int _res, double _margin) +{ + // TODO: Is this right place for default marker size? + edge_length = (_edge_length ? _edge_length : 1); + res = _res; //(_res?_res:10); + margin = (_margin ? _margin : 1); + double x_min = -0.5 * edge_length; + double y_min = -0.5 * edge_length; + double x_max = 0.5 * edge_length; + double y_max = 0.5 * edge_length; + double cx_min = (x_min * res) / (res + margin + margin); + double cy_min = (y_min * res) / (res + margin + margin); + double cx_max = (x_max * res) / (res + margin + margin); + double cy_max = (y_max * res) / (res + margin + margin); + double step = edge_length / (res + margin + margin); + + // marker_corners + marker_corners_img.resize(4); + + // Same order as the detected corners + marker_corners.clear(); + marker_corners.push_back(PointDouble(x_min, y_min)); + marker_corners.push_back(PointDouble(x_max, y_min)); + marker_corners.push_back(PointDouble(x_max, y_max)); + marker_corners.push_back(PointDouble(x_min, y_max)); + + // Rest can be done only if we have existing resolution + if (res <= 0) + return; + + // marker_points + marker_points.clear(); + for (int j = 0; j < res; ++j) + { + for (int i = 0; i < res; ++i) + { + PointDouble pt; + pt.y = cy_max - (step * j) - (step / 2); + pt.x = cx_min + (step * i) + (step / 2); + marker_points.push_back(pt); + } + } + + // Samples to be used in margins + // TODO: Now this works only if the "margin" is without decimals + // TODO: This should be made a lot cleaner + marker_margin_w.clear(); + marker_margin_b.clear(); + for (int j = -1; j <= margin - 1; j++) + { + PointDouble pt; + // Sides + for (int i = 0; i < res; i++) + { + pt.x = cx_min + step * i + step / 2; + pt.y = y_min + step * j + step / 2; + if (j < 0) + marker_margin_w.push_back(pt); + else + marker_margin_b.push_back(pt); + pt.y = y_max - step * j - step / 2; + if (j < 0) + marker_margin_w.push_back(pt); + else + marker_margin_b.push_back(pt); + pt.y = cy_min + step * i + step / 2; + pt.x = x_min + step * j + step / 2; + if (j < 0) + marker_margin_w.push_back(pt); + else + marker_margin_b.push_back(pt); + pt.x = x_max - step * j - step / 2; + if (j < 0) + marker_margin_w.push_back(pt); + else + marker_margin_b.push_back(pt); + } + // Corners + for (int i = -1; i <= margin - 1; i++) + { + pt.x = x_min + step * i + step / 2; + pt.y = y_min + step * j + step / 2; + if ((j < 0) || (i < 0)) + marker_margin_w.push_back(pt); + else + marker_margin_b.push_back(pt); + pt.x = x_min + step * i + step / 2; + pt.y = y_max - step * j - step / 2; + if ((j < 0) || (i < 0)) + marker_margin_w.push_back(pt); + else + marker_margin_b.push_back(pt); + pt.x = x_max - step * i - step / 2; + pt.y = y_max - step * j - step / 2; + if ((j < 0) || (i < 0)) + marker_margin_w.push_back(pt); + else + marker_margin_b.push_back(pt); + pt.x = x_max - step * i - step / 2; + pt.y = y_min + step * j + step / 2; + if ((j < 0) || (i < 
0)) + marker_margin_w.push_back(pt); + else + marker_margin_b.push_back(pt); + } + } + /* + for(int j = -margin-1; j < res+margin+margin+2; ++j) { + for(int i = 0; i < res+margin+margin+2; ++i) { + PointDouble pt; + pt.y = y_min - step/2 + step*j; + pt.x = x_min - step/2 + step*i; + if ((pt.x < x_min) || (pt.y < y_min) || + (pt.x > x_max) || (pt.y > y_max)) + { + marker_margin_w.push_back(pt); + } + else + if ((pt.x < cx_min) || (pt.y < cy_min) || + (pt.x > cx_max) || (pt.y > cy_max)) + { + marker_margin_b.push_back(pt); + } + } + } + /* + //double step = edge_length / (res + margin + margin); + for(int j = 0; j < res+margin+margin+2; ++j) { + for(int i = 0; i < res+margin+margin+2; ++i) { + PointDouble pt; + pt.y = y_min - step/2 + step*j; + pt.x = x_min - step/2 + step*i; + if ((pt.x < x_min) || (pt.y < y_min) || + (pt.x > x_max) || (pt.y > y_max)) + { + marker_margin_w.push_back(pt); + } + else + if ((pt.x < cx_min) || (pt.y < cy_min) || + (pt.x > cx_max) || (pt.y > cy_max)) + { + marker_margin_b.push_back(pt); + } + } + } + */ + /* + marker_margin_w.clear(); + marker_margin_b.clear(); + for (double y=y_min-(step/2); y x_max) || (y > y_max)) + { + marker_margin_w.push_back(pt); + } + else + if ((x < cx_min) || (y < cy_min) || + (x > cx_max) || (y > cy_max)) + { + marker_margin_b.push_back(pt); + } + } + } + */ + /* + marker_points.clear(); + marker_margin_w.clear(); + marker_margin_b.clear(); + for(int j = 0; j < res+margin+margin+2; ++j) { + for(int i = 0; i < res+margin+margin+2; ++i) { + PointDouble pt; + } + } + */ + + // marker content + if (!marker_content.empty()) + marker_content.release(); + marker_content = cv::Mat(res, res, CV_8U, cv::Scalar(255)); } -Marker::~Marker() { - if (marker_content) cvReleaseMat(&marker_content); +Marker::~Marker() +{ + if (!marker_content.empty()) + marker_content.release(); } Marker::Marker(double _edge_length, int _res, double _margin) { - marker_content = NULL; - margin_error = 0; - decode_error = 0; - track_error = 0; - SetMarkerSize(_edge_length, _res, _margin); - ros_orientation = -1; - ros_corners_3D.resize(4); - valid=false; + margin_error = 0; + decode_error = 0; + track_error = 0; + SetMarkerSize(_edge_length, _res, _margin); + ros_orientation = -1; + ros_corners_3D.resize(4); + valid = false; } -Marker::Marker(const Marker& m) { - marker_content = NULL; - SetMarkerSize(m.edge_length, m.res, m.margin); - - pose = m.pose; - margin_error = m.margin_error; - decode_error = m.decode_error; - track_error = m.track_error; - cvCopy(m.marker_content, marker_content); - ros_orientation = m.ros_orientation; - - ros_marker_points_img.resize(m.ros_marker_points_img.size()); - copy(m.ros_marker_points_img.begin(), m.ros_marker_points_img.end(), ros_marker_points_img.begin()); - marker_corners.resize(m.marker_corners.size()); - copy(m.marker_corners.begin(), m.marker_corners.end(), marker_corners.begin()); - marker_points.resize(m.marker_points.size()); - copy(m.marker_points.begin(), m.marker_points.end(), marker_points.begin()); - marker_corners_img.resize(m.marker_corners_img.size()); - copy(m.marker_corners_img.begin(), m.marker_corners_img.end(), marker_corners_img.begin()); - ros_corners_3D.resize(m.ros_corners_3D.size()); - copy(m.ros_corners_3D.begin(), m.ros_corners_3D.end(), ros_corners_3D.begin()); - - valid = m.valid; +Marker::Marker(const Marker& m) +{ + SetMarkerSize(m.edge_length, m.res, m.margin); + + pose = m.pose; + margin_error = m.margin_error; + decode_error = m.decode_error; + track_error = m.track_error; + 
m.marker_content.copyTo(marker_content); + ros_orientation = m.ros_orientation; + + ros_marker_points_img.resize(m.ros_marker_points_img.size()); + copy(m.ros_marker_points_img.begin(), m.ros_marker_points_img.end(), + ros_marker_points_img.begin()); + marker_corners.resize(m.marker_corners.size()); + copy(m.marker_corners.begin(), m.marker_corners.end(), + marker_corners.begin()); + marker_points.resize(m.marker_points.size()); + copy(m.marker_points.begin(), m.marker_points.end(), marker_points.begin()); + marker_corners_img.resize(m.marker_corners_img.size()); + copy(m.marker_corners_img.begin(), m.marker_corners_img.end(), + marker_corners_img.begin()); + ros_corners_3D.resize(m.ros_corners_3D.size()); + copy(m.ros_corners_3D.begin(), m.ros_corners_3D.end(), + ros_corners_3D.begin()); + + valid = m.valid; #ifdef VISUALIZE_MARKER_POINTS - marker_allpoints_img.resize(m.marker_allpoints_img.size()); - copy(m.marker_allpoints_img.begin(), m.marker_allpoints_img.end(), marker_allpoints_img.begin()); + marker_allpoints_img.resize(m.marker_allpoints_img.size()); + copy(m.marker_allpoints_img.begin(), m.marker_allpoints_img.end(), + marker_allpoints_img.begin()); #endif } -bool MarkerArtoolkit::DecodeContent(int *orientation) { - int a = (int)cvGetReal2D(marker_content, 0, 0); - int b = (int)cvGetReal2D(marker_content, res-1, 0); - int c = (int)cvGetReal2D(marker_content, res-1, res-1); - int d = (int)cvGetReal2D(marker_content, 0, res-1); - if (!a && !b && c) *orientation = 0; - else if (!b && !c && d) *orientation = 1; - else if (!c && !d && a) *orientation = 2; - else if (!d && !a && b) *orientation = 3; - else return false; - - Bitset bs; - bs.clear(); - for (int j = 0; j < res; j++) { - for (int i = 0; i < res ; i++) { - if (*orientation == 0) { - if ((j == 0) && (i == 0)) continue; - if ((j == res-1) && (i == 0)) continue; - if ((j == res-1) && (i == res-1)) continue; - if (cvGetReal2D(marker_content, j, i)) bs.push_back(false); - else bs.push_back(true); - } - else if (*orientation == 1) { - if (((res-i-1) == res-1) && (j == 0)) continue; - if (((res-i-1) == res-1) && (j == res-1)) continue; - if (((res-i-1) == 0) && (j == res-1)) continue; - if (cvGetReal2D(marker_content, res-i-1, j)) bs.push_back(false); - else bs.push_back(true); - } - else if (*orientation == 2) { - if (((res-j-1) == res-1) && ((res-i-1) == res-1)) continue; - if (((res-j-1) == 0) && ((res-i-1) == res-1)) continue; - if (((res-j-1) == 0) && ((res-i-1) == 0)) continue; - if (cvGetReal2D(marker_content, res-j-1, res-i-1)) bs.push_back(false); - else bs.push_back(true); - } - else if (*orientation == 3) { - if ((i == 0) && ((res-j-1) == res-1)) continue; - if ((i == 0) && ((res-j-1) == 0)) continue; - if ((i == res-1) && ((res-j-1) == 0)) continue; - if (cvGetReal2D(marker_content, i, res-j-1)) bs.push_back(false); - else bs.push_back(true); - } - } - } - id = bs.ulong(); - return true; +bool MarkerArtoolkit::DecodeContent(int* orientation) +{ + int a = (int)marker_content.at(0, 0); + int b = (int)marker_content.at(res - 1, 0); + int c = (int)marker_content.at(res - 1, res - 1); + int d = (int)marker_content.at(0, res - 1); + if (!a && !b && c) + *orientation = 0; + else if (!b && !c && d) + *orientation = 1; + else if (!c && !d && a) + *orientation = 2; + else if (!d && !a && b) + *orientation = 3; + else + return false; + + Bitset bs; + bs.clear(); + for (int j = 0; j < res; j++) + { + for (int i = 0; i < res; i++) + { + if (*orientation == 0) + { + if ((j == 0) && (i == 0)) + continue; + if ((j == res - 1) && (i == 
0)) + continue; + if ((j == res - 1) && (i == res - 1)) + continue; + if (marker_content.at(j, i)) + bs.push_back(false); + else + bs.push_back(true); + } + else if (*orientation == 1) + { + if (((res - i - 1) == res - 1) && (j == 0)) + continue; + if (((res - i - 1) == res - 1) && (j == res - 1)) + continue; + if (((res - i - 1) == 0) && (j == res - 1)) + continue; + if (marker_content.at(res - i - 1, j)) + bs.push_back(false); + else + bs.push_back(true); + } + else if (*orientation == 2) + { + if (((res - j - 1) == res - 1) && ((res - i - 1) == res - 1)) + continue; + if (((res - j - 1) == 0) && ((res - i - 1) == res - 1)) + continue; + if (((res - j - 1) == 0) && ((res - i - 1) == 0)) + continue; + if (marker_content.at(res - j - 1, res - i - 1)) + bs.push_back(false); + else + bs.push_back(true); + } + else if (*orientation == 3) + { + if ((i == 0) && ((res - j - 1) == res - 1)) + continue; + if ((i == 0) && ((res - j - 1) == 0)) + continue; + if ((i == res - 1) && ((res - j - 1) == 0)) + continue; + if (marker_content.at(i, res - j - 1)) + bs.push_back(false); + else + bs.push_back(true); + } + } + } + id = bs.ulong(); + return true; } -void MarkerArtoolkit::SetContent(unsigned long _id) { - // Fill in the content values - margin_error = 0; - decode_error = 0; - id = _id; - Bitset bs; - bs.push_back_meaningful(_id); - for (int j = res-1; j >= 0; j--) { - for (int i = res-1; i >= 0 ; i--) { - if ((j == 0) && (i == 0)) - cvSetReal2D(marker_content, j, i, 0); - else if ((j == res-1) && (i == 0)) - cvSetReal2D(marker_content, j, i, 0); - else if ((j == res-1) && (i == res-1)) - cvSetReal2D(marker_content, j, i, 255); - else { - if (bs.Length() && bs.pop_back()) - cvSetReal2D(marker_content, j, i, 0); - else - cvSetReal2D(marker_content, j, i, 255); - } - } - } +void MarkerArtoolkit::SetContent(unsigned long _id) +{ + // Fill in the content values + margin_error = 0; + decode_error = 0; + id = _id; + Bitset bs; + bs.push_back_meaningful(_id); + for (int j = res - 1; j >= 0; j--) + { + for (int i = res - 1; i >= 0; i--) + { + if ((j == 0) && (i == 0)) + marker_content.at(j, i) = 0; + else if ((j == res - 1) && (i == 0)) + marker_content.at(j, i) = 0; + else if ((j == res - 1) && (i == res - 1)) + marker_content.at(j, i) = 255; + else + { + if (bs.Length() && bs.pop_back()) + marker_content.at(j, i) = 0; + else + marker_content.at(j, i) = 255; + } + } + } } -void MarkerData::DecodeOrientation(int *error, int *total, int *orientation) { - int i,j; - vector errors(4); - int color = 255; - - // Resolution identification - j = res/2; - for (i=0; i (res/2)) { - (*total)++; - if ((int)cvGetReal2D(marker_content, j, i) != 255) errors[0]++; - if ((int)cvGetReal2D(marker_content, i, j) != 255) errors[1]++; - if ((int)cvGetReal2D(marker_content, j, i) != 0) errors[2]++; - if ((int)cvGetReal2D(marker_content, i, j) != 0) errors[3]++; - } - } - *orientation = min_element(errors.begin(), errors.end()) - errors.begin(); - *error = int(errors[*orientation]); - //*orientation = 0; // ttehop +void MarkerData::DecodeOrientation(int* error, int* total, int* orientation) +{ + int i, j; + vector errors(4); + int color = 255; + + // Resolution identification + j = res / 2; + for (i = 0; i < res; i++) + { + (*total)++; + if ((int)marker_content.at(j, i) != color) + errors[0]++; + if ((int)marker_content.at(i, j) != color) + errors[1]++; + color = (color ? 
0 : 255); + } + errors[2] = errors[0]; + errors[3] = errors[1]; + + // Orientation identification + i = res / 2; + for (j = (res / 2) - 2; j <= (res / 2) + 2; j++) + { + if (j < (res / 2)) + { + (*total)++; + if ((int)marker_content.at(j, i) != 0) + { + errors[0]++; + } + + if ((int)marker_content.at(i, j) != 0) + { + errors[1]++; + } + if ((int)marker_content.at(j, i) != 255) + { + errors[2]++; + } + if ((int)marker_content.at(i, j) != 255) + { + errors[3]++; + } + } + else if (j > (res / 2)) + { + (*total)++; + if ((int)marker_content.at(j, i) != 255) + { + errors[0]++; + } + if ((int)marker_content.at(i, j) != 255) + { + errors[1]++; + } + if ((int)marker_content.at(j, i) != 0) + { + errors[2]++; + } + if ((int)marker_content.at(i, j) != 0) + { + errors[3]++; + } + } + } + *orientation = min_element(errors.begin(), errors.end()) - errors.begin(); + *error = int(errors[*orientation]); + //*orientation = 0; // ttehop } -bool MarkerData::DetectResolution(std::vector &_marker_corners_img, cv::Mat *gray, Camera *cam) { - vector marker_corners_img_undist; - marker_corners_img_undist.resize(_marker_corners_img.size()); - copy(_marker_corners_img.begin(), _marker_corners_img.end(), marker_corners_img_undist.begin()); - - // line_points - std::vector line_points; - PointDouble pt; - line_points.clear(); - pt.x=0; pt.y=0; line_points.push_back(pt); - pt.x=-0.5*edge_length; pt.y=0; line_points.push_back(pt); - pt.x=+0.5*edge_length; pt.y=0; line_points.push_back(pt); - pt.x=0; pt.y=-0.5*edge_length; line_points.push_back(pt); - pt.x=0; pt.y=+0.5*edge_length; line_points.push_back(pt); - - // Figure out the marker point position in the image - // TODO: Note that line iterator cannot iterate outside image - // therefore we need to distort the endpoints and iterate straight lines. - // Right way would be to iterate undistorted lines and distort line points. 
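// Illustrative aside, not part of this patch: the centre-line pattern that
// DecodeOrientation above checks, shown standalone. Along the middle row
// the cells alternate 255/0 starting from white, and each mismatch against
// that pattern counts as one error.
#include <iostream>

int main()
{
  const int res = 7;
  const int row[res] = { 255, 0, 255, 0, 255, 0, 255 };  // a correct centre line
  int color = 255, errors = 0;
  for (int i = 0; i < res; i++)
  {
    if (row[i] != color)
      errors++;
    color = (color ? 0 : 255);  // alternate, exactly as the decoder does
  }
  std::cout << "errors: " << errors << std::endl;  // prints 0
  return 0;
}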
- Homography H; - vector line_points_img(line_points.size()); - line_points_img.resize(line_points.size()); - cam->Undistort(marker_corners_img_undist); - H.Find(marker_corners, marker_corners_img_undist); - H.ProjectPoints(line_points, line_points_img); - cam->Distort(line_points_img); - - // Now we have undistorted line end points - // Find lines and then distort the whole line - int white_count[4] = {0}; // white counts for lines 1->0, 2->0, 3->0, 4->0 - CvPoint pt1, pt2; - pt2.x = int(line_points_img[0].x); - pt2.y = int(line_points_img[0].y); - if ((pt2.x < 0) || (pt2.y < 0) || - (pt2.x >= gray->rows) || (pt2.y >= gray->cols)) - { - return false; - } - bool white=true; - for (int i=0; i<4; i++) { - // CvLineIterator iterator; - pt1.x = int(line_points_img[i+1].x); - pt1.y = int(line_points_img[i+1].y); - if ((pt1.x < 0) || (pt1.y < 0) || - (pt1.x >= gray->rows) || (pt1.y >= gray->cols)) - { - return false; - } - cv::LineIterator iterator = cv::LineIterator(*gray, pt1, pt2, 8, 0); - int count = iterator.count; - std::vector vals; - for(int ii = 0; ii < count; ii++, ++iterator){ - //CV_NEXT_LINE_POINT(iterator); - - vals.push_back(*(iterator.ptr)); - } - uchar vmin = *(std::min_element(vals.begin(), vals.end())); - uchar vmax = *(std::max_element(vals.begin(), vals.end())); - uchar thresh = (vmin+vmax)/2; - white=true; - int bc=0, wc=0, N=2; - for (size_t ii=0; ii= N)) { - white=false; - } - else if (!white && (wc >= N)) { - white=true; - white_count[i]++; - } - } - } - - if ((white_count[0]+white_count[1]) == (white_count[2]+white_count[3])) return false; - else if ((white_count[0]+white_count[1]) > (white_count[2]+white_count[3])) { - if (white_count[0] != white_count[1]) return false; - if (white_count[0] < 2) return false; - int nof_whites = white_count[0]*2-(white?1:0); // 'white' contains middle color - int new_res = 2*nof_whites-1; - SetMarkerSize(edge_length, new_res, margin); - } - else { - if (white_count[2] != white_count[3]) return false; - if (white_count[2] < 2) return false; - if (((white_count[2]%2) == 0) != white) return false; - int nof_whites = white_count[2]*2-(white?1:0); - int new_res = 2*nof_whites-1; - SetMarkerSize(edge_length, new_res, margin); - } - return true; +bool MarkerData::DetectResolution(vector& _marker_corners_img, + cv::Mat& gray, Camera* cam) +{ + vector marker_corners_img_undist; + marker_corners_img_undist.resize(_marker_corners_img.size()); + copy(_marker_corners_img.begin(), _marker_corners_img.end(), + marker_corners_img_undist.begin()); + + // line_points + std::vector line_points; + PointDouble pt; + line_points.clear(); + pt.x = 0; + pt.y = 0; + line_points.push_back(pt); + pt.x = -0.5 * edge_length; + pt.y = 0; + line_points.push_back(pt); + pt.x = +0.5 * edge_length; + pt.y = 0; + line_points.push_back(pt); + pt.x = 0; + pt.y = -0.5 * edge_length; + line_points.push_back(pt); + pt.x = 0; + pt.y = +0.5 * edge_length; + line_points.push_back(pt); + + // Figure out the marker point position in the image + // TODO: Note that line iterator cannot iterate outside image + // therefore we need to distort the endpoints and iterate straight + // lines. Right way would be to iterate undistorted lines and distort + // line points. 
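// Illustrative aside, not part of this patch: a standalone sketch of the
// cv::LineIterator scan that DetectResolution performs a few lines below,
// run on a synthetic image. Pixels between two points are sampled,
// thresholded at the midpoint of the observed range, and runs of white are
// counted (the real code also debounces with N subsequent pixels).
#include <opencv2/imgproc.hpp>
#include <algorithm>
#include <iostream>
#include <vector>

int main()
{
  cv::Mat gray(10, 10, CV_8UC1, cv::Scalar(0));
  gray.colRange(4, 6).setTo(255);  // one white band to cross
  cv::LineIterator it(gray, cv::Point(0, 5), cv::Point(9, 5), 8);
  std::vector<uchar> vals;
  for (int i = 0; i < it.count; i++, ++it)
    vals.push_back(**it);  // *it is a pointer to the current pixel
  uchar thresh = (*std::min_element(vals.begin(), vals.end()) +
                  *std::max_element(vals.begin(), vals.end())) / 2;
  int white_runs = 0;
  bool white = false;
  for (uchar v : vals)
  {
    if (v > thresh && !white)
      white_runs++;
    white = v > thresh;
  }
  std::cout << "white runs: " << white_runs << std::endl;  // prints 1
  return 0;
}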
+ Homography H; + vector line_points_img(line_points.size()); + line_points_img.resize(line_points.size()); + cam->Undistort(marker_corners_img_undist); + H.Find(marker_corners, marker_corners_img_undist); + H.ProjectPoints(line_points, line_points_img); + cam->Distort(line_points_img); + + // Now we have undistorted line end points + // Find lines and then distort the whole line + int white_count[4] = { 0 }; // white counts for lines 1->0, 2->0, 3->0, 4->0 + cv::Point pt1, pt2; + pt2.x = int(line_points_img[0].x); + pt2.y = int(line_points_img[0].y); + if ((pt2.x < 0) || (pt2.y < 0) || (pt2.x >= gray.cols) || + (pt2.y >= gray.rows)) + { + return false; + } + bool white = true; + for (int i = 0; i < 4; i++) + { + pt1.x = int(line_points_img[i + 1].x); + pt1.y = int(line_points_img[i + 1].y); + if ((pt1.x < 0) || (pt1.y < 0) || (pt1.x >= gray.cols) || + (pt1.y >= gray.rows)) + { + return false; + } + cv::LineIterator iterator(gray, pt1, pt2, 8, false); + std::vector vals; + for (int ii = 0; ii < iterator.count; ii++, ++iterator) + { + vals.push_back(*(iterator.ptr)); + } + uchar vmin = *(std::min_element(vals.begin(), vals.end())); + uchar vmax = *(std::max_element(vals.begin(), vals.end())); + uchar thresh = (vmin + vmax) / 2; + white = true; + int bc = 0, wc = 0, N = 2; + for (size_t ii = 0; ii < vals.size(); ii++) + { + // change the color status if we had + // N subsequent pixels of the other color + if (vals[ii] < thresh) + { + bc++; + wc = 0; + } + else + { + wc++; + bc = 0; + } + + if (white && (bc >= N)) + { + white = false; + } + else if (!white && (wc >= N)) + { + white = true; + white_count[i]++; + } + } + } + + if ((white_count[0] + white_count[1]) == (white_count[2] + white_count[3])) + return false; + else if ((white_count[0] + white_count[1]) > + (white_count[2] + white_count[3])) + { + if (white_count[0] != white_count[1]) + return false; + if (white_count[0] < 2) + return false; + int nof_whites = + white_count[0] * 2 - (white ? 1 : 0); // 'white' contains middle color + int new_res = 2 * nof_whites - 1; + SetMarkerSize(edge_length, new_res, margin); + } + else + { + if (white_count[2] != white_count[3]) + return false; + if (white_count[2] < 2) + return false; + if (((white_count[2] % 2) == 0) != white) + return false; + int nof_whites = white_count[2] * 2 - (white ? 1 : 0); + int new_res = 2 * nof_whites - 1; + SetMarkerSize(edge_length, new_res, margin); + } + return true; } -bool MarkerData::UpdateContent(vector &_marker_corners_img, cv::Mat *gray, Camera *cam, int frame_no /*= 0*/) { - if (res == 0) { - if (!DetectResolution(_marker_corners_img, gray, cam)) return false; - } - return UpdateContentBasic(_marker_corners_img, gray, cam, frame_no); +bool MarkerData::UpdateContent(vector& _marker_corners_img, + cv::Mat& gray, Camera* cam, int frame_no /*= 0*/) +{ + if (res == 0) + { + if (!DetectResolution(_marker_corners_img, gray, cam)) + return false; + } + return UpdateContentBasic(_marker_corners_img, gray, cam, frame_no); } -int MarkerData::DecodeCode(int orientation, BitsetExt *bs, int *erroneous, int *total, - unsigned char* content_type) +int MarkerData::DecodeCode(int orientation, BitsetExt* bs, int* erroneous, + int* total, unsigned char* content_type) { - // TODO: The orientation isn't fully understood? - //for (int j = res-1; j >= 0; j--) { - for (int j = 0; j < res; j++) { - for (int i = 0; i < res ; i++) { - // TODO: Does this work ok for larger markers? 
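// Illustrative aside, not part of this patch: the per-orientation cell
// lookup DecodeCode uses, demonstrated on a toy 2x2 grid (real markers are
// odd-sized, 5x5 and up). The read pattern is rotated instead of rotating
// the image itself.
#include <opencv2/core.hpp>
#include <iostream>

static uchar read_cell(const cv::Mat& m, int orientation, int j, int i)
{
  const int res = m.rows;
  switch (orientation)
  {
    case 1:
      return m.at<uchar>(res - i - 1, j);
    case 2:
      return m.at<uchar>(res - j - 1, res - i - 1);
    case 3:
      return m.at<uchar>(i, res - j - 1);
    default:
      return m.at<uchar>(j, i);
  }
}

int main()
{
  cv::Mat m = (cv::Mat_<uchar>(2, 2) << 1, 2, 3, 4);
  for (int o = 0; o < 4; o++)
    std::cout << "orientation " << o << ", cell (0,0): "
              << int(read_cell(m, o, 0, 0)) << std::endl;  // 1, 3, 4, 2
  return 0;
}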
- if ((orientation == 0) || (orientation == 2)) { - if (j == res/2) continue; - if ((i == res/2) && (j >= (res/2)-2) && (j <= (res/2)+2)) continue; - } else { - if (i == res/2) continue; - if ((j == res/2) && (i >= (res/2)-2) && (i <= (res/2)+2)) continue; - } - int color = 0; - if (orientation == 0) color = (int)cvGetReal2D(marker_content, j, i); - else if (orientation == 1) color = (int)cvGetReal2D(marker_content, res-i-1, j); - else if (orientation == 2) color = (int)cvGetReal2D(marker_content, res-j-1, res-i-1); - else if (orientation == 3) color = (int)cvGetReal2D(marker_content, i, res-j-1); - if (color) bs->push_back(false); - else bs->push_back(true); - (*total)++; - } - } - - unsigned char flags = 0; - int errors = 0; - - // if we have larger than 16-bit code, then we have a header; 16-bit code has a - // hamming(8,4) coded number - if(bs->Length() > 16){ - // read header (8-bit hamming(8,4) -> 4-bit flags) - BitsetExt header; - - for(int i = 0; i < HEADER_SIZE; i++) - header.push_back(bs->pop_front()); - - errors = header.hamming_dec(8); - if(errors == -1){ - //OutputDebugString("header decoding failed!!!!!\n"); - return errors; - } - - flags = header.uchar(); - } - else - flags &= MARKER_CONTENT_TYPE_NUMBER; - - // check which hamming we are using - //bs->Output(cout); cout<hamming_dec(16); - else errors = bs->hamming_dec(8); - *content_type = flags & 0x7; - - if (errors > 0) (*erroneous) += errors; - return errors; + // TODO: The orientation isn't fully understood? + // for (int j = res-1; j >= 0; j--) { + for (int j = 0; j < res; j++) + { + for (int i = 0; i < res; i++) + { + // TODO: Does this work ok for larger markers? + if ((orientation == 0) || (orientation == 2)) + { + if (j == res / 2) + continue; + if ((i == res / 2) && (j >= (res / 2) - 2) && (j <= (res / 2) + 2)) + continue; + } + else + { + if (i == res / 2) + continue; + if ((j == res / 2) && (i >= (res / 2) - 2) && (i <= (res / 2) + 2)) + continue; + } + int color = 0; + if (orientation == 0) + color = (int)marker_content.at(j, i); + else if (orientation == 1) + color = (int)marker_content.at(res - i - 1, j); + else if (orientation == 2) + color = (int)marker_content.at(res - j - 1, res - i - 1); + else if (orientation == 3) + color = (int)marker_content.at(i, res - j - 1); + if (color) + bs->push_back(false); + else + bs->push_back(true); + (*total)++; + } + } + + unsigned char flags = 0; + int errors = 0; + + // if we have larger than 16-bit code, then we have a header; 16-bit code has + // a hamming(8,4) coded number + if (bs->Length() > 16) + { + // read header (8-bit hamming(8,4) -> 4-bit flags) + BitsetExt header; + + for (int i = 0; i < HEADER_SIZE; i++) + header.push_back(bs->pop_front()); + + errors = header.hamming_dec(8); + if (errors == -1) + { + // OutputDebugString("header decoding failed!!!!!\n"); + return errors; + } + + flags = header.uchar(); + } + else + flags &= MARKER_CONTENT_TYPE_NUMBER; + + // check which hamming we are using + // bs->Output(cout); cout<hamming_dec(16); + else + errors = bs->hamming_dec(8); + *content_type = flags & 0x7; + + if (errors > 0) + (*erroneous) += errors; + return errors; } -void MarkerData::Read6bitStr(BitsetExt *bs, char *s, size_t s_max_len) { - deque bits = bs->GetBits(); - deque::const_iterator iter; - size_t len = 0; - int bitpos = 5; - unsigned long c=0; - for (iter = bits.begin(); iter != bits.end(); iter++) { - if (*iter) c |= (0x01 << bitpos); - bitpos--; - if (bitpos < 0) { - if (c == 000) s[len] = ':'; - else if ((c >= 001) && (c <= 032)) s[len] = 'a' 
+ (char)c - 1;
- else if ((c >= 033) && (c <= 044)) s[len] = '0' + (char)c - 1;
- else if (c == 045) s[len] = '+';
- else if (c == 046) s[len] = '-';
- else if (c == 047) s[len] = '*';
- else if (c == 050) s[len] = '/';
- else if (c == 051) s[len] = '(';
- else if (c == 052) s[len] = ')';
- else if (c == 053) s[len] = '$';
- else if (c == 054) s[len] = '=';
- else if (c == 055) s[len] = ' ';
- else if (c == 056) s[len] = ',';
- else if (c == 057) s[len] = '.';
- else if (c == 060) s[len] = '#';
- else if (c == 061) s[len] = '[';
- else if (c == 062) s[len] = ']';
- else if (c == 063) s[len] = '%';
- else if (c == 064) s[len] = '\"';
- else if (c == 065) s[len] = '_';
- else if (c == 066) s[len] = '!';
- else if (c == 067) s[len] = '&';
- else if (c == 070) s[len] = '\'';
- else if (c == 071) s[len] = '?';
- else if (c == 072) s[len] = '<';
- else if (c == 073) s[len] = '>';
- else if (c == 074) s[len] = '@';
- else if (c == 075) s[len] = '\\';
- else if (c == 076) s[len] = '^';
- else if (c == 077) s[len] = ';';
- else s[len] = '?';
- len++;
- if (len >= (s_max_len-1)) break;
- bitpos=5; c=0;
- }
- }
- s[len] = 0;
+void MarkerData::Read6bitStr(BitsetExt* bs, char* s, size_t s_max_len)
+{
+ deque<bool> bits = bs->GetBits();
+ deque<bool>::const_iterator iter;
+ size_t len = 0;
+ int bitpos = 5;
+ unsigned long c = 0;
+ for (iter = bits.begin(); iter != bits.end(); iter++)
+ {
+ if (*iter)
+ c |= (0x01 << bitpos);
+ bitpos--;
+ if (bitpos < 0)
+ {
+ if (c == 000)
+ s[len] = ':';
+ else if ((c >= 001) && (c <= 032))
+ s[len] = 'a' + (char)c - 1;
+ else if ((c >= 033) && (c <= 044))
+ s[len] = '0' + (char)c - 033;
+ else if (c == 045)
+ s[len] = '+';
+ else if (c == 046)
+ s[len] = '-';
+ else if (c == 047)
+ s[len] = '*';
+ else if (c == 050)
+ s[len] = '/';
+ else if (c == 051)
+ s[len] = '(';
+ else if (c == 052)
+ s[len] = ')';
+ else if (c == 053)
+ s[len] = '$';
+ else if (c == 054)
+ s[len] = '=';
+ else if (c == 055)
+ s[len] = ' ';
+ else if (c == 056)
+ s[len] = ',';
+ else if (c == 057)
+ s[len] = '.';
+ else if (c == 060)
+ s[len] = '#';
+ else if (c == 061)
+ s[len] = '[';
+ else if (c == 062)
+ s[len] = ']';
+ else if (c == 063)
+ s[len] = '%';
+ else if (c == 064)
+ s[len] = '\"';
+ else if (c == 065)
+ s[len] = '_';
+ else if (c == 066)
+ s[len] = '!';
+ else if (c == 067)
+ s[len] = '&';
+ else if (c == 070)
+ s[len] = '\'';
+ else if (c == 071)
+ s[len] = '?';
+ else if (c == 072)
+ s[len] = '<';
+ else if (c == 073)
+ s[len] = '>';
+ else if (c == 074)
+ s[len] = '@';
+ else if (c == 075)
+ s[len] = '\\';
+ else if (c == 076)
+ s[len] = '^';
+ else if (c == 077)
+ s[len] = ';';
+ else
+ s[len] = '?';
+ len++;
+ if (len >= (s_max_len - 1))
+ break;
+ bitpos = 5;
+ c = 0;
+ }
+ }
+ s[len] = 0;
 }
-bool MarkerData::DecodeContent(int *orientation) {
- //bool decode(vector& colors, int *orientation, double *error) {
- *orientation = 0;
-
- BitsetExt bs;
- int erroneous=0;
- int total=0;
-
- DecodeOrientation(&erroneous, &total, orientation);
- int err = DecodeCode(*orientation, &bs, &erroneous, &total, &content_type);
- if(err == -1) {
- // couldn't fix
- decode_error = DBL_MAX;
- return false;
- }
-
- if(content_type == MARKER_CONTENT_TYPE_NUMBER){
- data.id = bs.ulong();
- }
- else {
- Read6bitStr(&bs, data.str, MAX_MARKER_STRING_LEN);
- }
-
- decode_error = (double)(erroneous)/total;
-
- return true;
+bool MarkerData::DecodeContent(int* orientation)
+{
+ // bool decode(vector& colors, int *orientation, double *error) {
+ *orientation = 0;
+ BitsetExt bs;
+ int erroneous = 0;
+ int total = 0;
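// Illustrative aside, not part of this patch: the 6-bit character code used
// by Read6bitStr above and Add6bitStr below, decoded standalone for the two
// contiguous ranges (letters and digits). The digit branch is the inverse
// of the encoder's 033 + c - '0' mapping shown below.
#include <iostream>

static char decode6(unsigned long c)
{
  if (c == 000)
    return ':';
  if ((c >= 001) && (c <= 032))
    return 'a' + char(c) - 1;  // 26 letters
  if ((c >= 033) && (c <= 044))
    return '0' + char(c - 033);  // 10 digits
  return '?';  // punctuation table omitted in this sketch
}

int main()
{
  std::cout << decode6(001) << decode6(032) << decode6(033) << decode6(044)
            << std::endl;  // prints "az09"
  return 0;
}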
+ + DecodeOrientation(&erroneous, &total, orientation); + int err = DecodeCode(*orientation, &bs, &erroneous, &total, &content_type); + if (err == -1) + { + // couldn't fix + decode_error = DBL_MAX; + return false; + } + + if (content_type == MARKER_CONTENT_TYPE_NUMBER) + { + data.id = bs.ulong(); + } + else + { + Read6bitStr(&bs, data.str, MAX_MARKER_STRING_LEN); + } + + decode_error = (double)(erroneous) / total; + + return true; } -void MarkerData::Add6bitStr(BitsetExt *bs, char *s) { - while (*s) { - unsigned char c = (unsigned char)*s; - if (c == ':') bs->push_back((unsigned char)0,6); - else if ((c >= 'A') && (c <= 'Z')) bs->push_back((unsigned char)(001 + c - 'A'),6); - else if ((c >= 'a') && (c <= 'z')) bs->push_back((unsigned char)(001 + c - 'a'),6); - else if ((c >= '0') && (c <= '9')) bs->push_back((unsigned char)(033 + c - '0'),6); - else if (c == '+') bs->push_back((unsigned char)045,6); - else if (c == '-') bs->push_back((unsigned char)046,6); - else if (c == '*') bs->push_back((unsigned char)047,6); - else if (c == '/') bs->push_back((unsigned char)050,6); - else if (c == '(') bs->push_back((unsigned char)051,6); - else if (c == ')') bs->push_back((unsigned char)052,6); - else if (c == '$') bs->push_back((unsigned char)053,6); - else if (c == '=') bs->push_back((unsigned char)054,6); - else if (c == ' ') bs->push_back((unsigned char)055,6); - else if (c == ',') bs->push_back((unsigned char)056,6); - else if (c == '.') bs->push_back((unsigned char)057,6); - else if (c == '#') bs->push_back((unsigned char)060,6); - else if (c == '[') bs->push_back((unsigned char)061,6); - else if (c == ']') bs->push_back((unsigned char)062,6); - else if (c == '%') bs->push_back((unsigned char)063,6); - else if (c == '\"') bs->push_back((unsigned char)064,6); - else if (c == '_') bs->push_back((unsigned char)065,6); - else if (c == '!') bs->push_back((unsigned char)066,6); - else if (c == '&') bs->push_back((unsigned char)067,6); - else if (c == '\'') bs->push_back((unsigned char)070,6); - else if (c == '?') bs->push_back((unsigned char)071,6); - else if (c == '<') bs->push_back((unsigned char)072,6); - else if (c == '>') bs->push_back((unsigned char)073,6); - else if (c == '@') bs->push_back((unsigned char)074,6); - else if (c == '\\') bs->push_back((unsigned char)075,6); - else if (c == '^') bs->push_back((unsigned char)076,6); - else if (c == ';') bs->push_back((unsigned char)077,6); - else bs->push_back((unsigned char)071,6); - s++; - } +void MarkerData::Add6bitStr(BitsetExt* bs, char* s) +{ + while (*s) + { + unsigned char c = (unsigned char)*s; + if (c == ':') + bs->push_back((unsigned char)0, 6); + else if ((c >= 'A') && (c <= 'Z')) + bs->push_back((unsigned char)(001 + c - 'A'), 6); + else if ((c >= 'a') && (c <= 'z')) + bs->push_back((unsigned char)(001 + c - 'a'), 6); + else if ((c >= '0') && (c <= '9')) + bs->push_back((unsigned char)(033 + c - '0'), 6); + else if (c == '+') + bs->push_back((unsigned char)045, 6); + else if (c == '-') + bs->push_back((unsigned char)046, 6); + else if (c == '*') + bs->push_back((unsigned char)047, 6); + else if (c == '/') + bs->push_back((unsigned char)050, 6); + else if (c == '(') + bs->push_back((unsigned char)051, 6); + else if (c == ')') + bs->push_back((unsigned char)052, 6); + else if (c == '$') + bs->push_back((unsigned char)053, 6); + else if (c == '=') + bs->push_back((unsigned char)054, 6); + else if (c == ' ') + bs->push_back((unsigned char)055, 6); + else if (c == ',') + bs->push_back((unsigned char)056, 6); + else if (c == '.') + 
bs->push_back((unsigned char)057, 6); + else if (c == '#') + bs->push_back((unsigned char)060, 6); + else if (c == '[') + bs->push_back((unsigned char)061, 6); + else if (c == ']') + bs->push_back((unsigned char)062, 6); + else if (c == '%') + bs->push_back((unsigned char)063, 6); + else if (c == '\"') + bs->push_back((unsigned char)064, 6); + else if (c == '_') + bs->push_back((unsigned char)065, 6); + else if (c == '!') + bs->push_back((unsigned char)066, 6); + else if (c == '&') + bs->push_back((unsigned char)067, 6); + else if (c == '\'') + bs->push_back((unsigned char)070, 6); + else if (c == '?') + bs->push_back((unsigned char)071, 6); + else if (c == '<') + bs->push_back((unsigned char)072, 6); + else if (c == '>') + bs->push_back((unsigned char)073, 6); + else if (c == '@') + bs->push_back((unsigned char)074, 6); + else if (c == '\\') + bs->push_back((unsigned char)075, 6); + else if (c == '^') + bs->push_back((unsigned char)076, 6); + else if (c == ';') + bs->push_back((unsigned char)077, 6); + else + bs->push_back((unsigned char)071, 6); + s++; + } } -int MarkerData::UsableDataBits(int marker_res, int hamming) { - if (marker_res < 5) return 0; - if (!(marker_res % 2)) return 0; - int bits = marker_res * marker_res; - if (marker_res > 5) bits -= 8; // With larger resolutions we reserve 8 bits for hamming(8,4) encoded 4 flags - bits -= marker_res; // center line indicating the resolution - bits -= 4; // the four pixels indicating the orientation - int tail = bits % hamming; - if (tail < 3) bits -= tail; // hamming can't use tail pixels if there is only 2 or 1 of them - return bits; +int MarkerData::UsableDataBits(int marker_res, int hamming) +{ + if (marker_res < 5) + return 0; + if (!(marker_res % 2)) + return 0; + int bits = marker_res * marker_res; + if (marker_res > 5) + bits -= 8; // With larger resolutions we reserve 8 bits for hamming(8,4) + // encoded 4 flags + bits -= marker_res; // center line indicating the resolution + bits -= 4; // the four pixels indicating the orientation + int tail = bits % hamming; + if (tail < 3) + bits -= + tail; // hamming can't use tail pixels if there is only 2 or 1 of them + return bits; } -void MarkerData::SetContent(MarkerContentType _content_type, unsigned long _id, const char *_str, bool force_strong_hamming, bool verbose) { - // Fill in the content values - content_type = _content_type; - margin_error = 0; - decode_error = 0; - if (content_type == MARKER_CONTENT_TYPE_NUMBER) { - data.id = _id; - } else { - STRCPY(data.str, MAX_MARKER_STRING_LEN, _str); - } - // Encode - const int max_marker_res = 127; - BitsetExt bs_flags(verbose); - BitsetExt bs_data(verbose); - int enc_bits; // How many encoded bits fits in the marker - int data_bits; // How many data bits fit inside the encoded bits - int hamming; // Do we use 8-bit or 16-bit hamming? 
- if (content_type == MARKER_CONTENT_TYPE_NUMBER) { - bs_data.push_back_meaningful(data.id); - for (res=5; res= bs_data.Length()) break; - if ((res > 5) && !force_strong_hamming) { - hamming = 16; - enc_bits = UsableDataBits(res, hamming); - data_bits = BitsetExt::count_hamming_dec_len(hamming, enc_bits); - if (data_bits >= bs_data.Length()) break; - } - } - bs_data.fill_zeros_left(data_bits); - bs_data.hamming_enc(hamming); - if (verbose) { - cout<<"Using hamming("< 5) { - if (hamming == 16) bs_flags.push_back(true); - else bs_flags.push_back(false); - bs_flags.push_back((unsigned long)0,3); - bs_flags.hamming_enc(8); - if (verbose) { - cout<<"flags src: "; bs_flags.Output(cout); cout<= bs_data.Length()) break; - if (!force_strong_hamming) { - hamming = 16; - enc_bits = UsableDataBits(res, hamming); - data_bits = BitsetExt::count_hamming_dec_len(hamming, enc_bits); - if (data_bits >= bs_data.Length()) break; - } - } - while (bs_data.Length() < ((data_bits/6)*6)) { - bs_data.push_back((unsigned char)055,6); // Add space - } - while (bs_data.Length() < data_bits) { - bs_data.push_back(false); // Add 0 - } - bs_data.hamming_enc(hamming); - if (hamming == 16) bs_flags.push_back(true); - else bs_flags.push_back(false); - if (content_type == MARKER_CONTENT_TYPE_STRING) bs_flags.push_back((unsigned long)1,3); - else if (content_type == MARKER_CONTENT_TYPE_FILE) bs_flags.push_back((unsigned long)2,3); - else if (content_type == MARKER_CONTENT_TYPE_HTTP) bs_flags.push_back((unsigned long)3,3); - bs_flags.hamming_enc(8); - if (verbose) { - cout<<"Using hamming("< bs(bs_flags.GetBits()); - bs.insert(bs.end(), bs_data.GetBits().begin(), bs_data.GetBits().end()); - deque::const_iterator iter = bs.begin(); - SetMarkerSize(edge_length, res, margin); - cvSet(marker_content, cvScalar(255)); - for (int j=0; j= (res/2)-2)) { - cvSet2D(marker_content, j, i, cvScalar(0)); - } else if ((i == res/2) && (j > res/2) && (j <= (res/2)+2)) { - cvSet2D(marker_content, j, i, cvScalar(255)); - } else { - if (iter != bs.end()) { - if (*iter) cvSet2D(marker_content, j, i, cvScalar(0)); - iter++; - } - } - } - } +void MarkerData::SetContent(MarkerContentType _content_type, unsigned long _id, + const char* _str, bool force_strong_hamming, + bool verbose) +{ + // Fill in the content values + content_type = _content_type; + margin_error = 0; + decode_error = 0; + if (content_type == MARKER_CONTENT_TYPE_NUMBER) + { + data.id = _id; + } + else + { + STRCPY(data.str, MAX_MARKER_STRING_LEN, _str); + } + // Encode + const int max_marker_res = 127; + BitsetExt bs_flags(verbose); + BitsetExt bs_data(verbose); + int enc_bits; // How many encoded bits fits in the marker + int data_bits; // How many data bits fit inside the encoded bits + int hamming; // Do we use 8-bit or 16-bit hamming? 
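+  // Grow the (odd) marker resolution in steps of two until the payload plus
+  // Hamming overhead fits; UsableDataBits() above gives the bit budget.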
+  if (content_type == MARKER_CONTENT_TYPE_NUMBER)
+  {
+    bs_data.push_back_meaningful(data.id);
+    for (res = 5; res < max_marker_res; res += 2)
+    {
+      hamming = 8;
+      enc_bits = UsableDataBits(res, hamming);
+      data_bits = BitsetExt::count_hamming_dec_len(hamming, enc_bits);
+      if (data_bits >= bs_data.Length())
+        break;
+      if ((res > 5) && !force_strong_hamming)
+      {
+        hamming = 16;
+        enc_bits = UsableDataBits(res, hamming);
+        data_bits = BitsetExt::count_hamming_dec_len(hamming, enc_bits);
+        if (data_bits >= bs_data.Length())
+          break;
+      }
+    }
+    bs_data.fill_zeros_left(data_bits);
+    bs_data.hamming_enc(hamming);
+    if (verbose)
+    {
+      cout << "Using hamming(" << hamming << ") for " << res << "x" << res
+           << " marker" << endl;
+      cout << bs_data.Length() << " bits are filled into " << data_bits;
+      cout << " bits, and encoded into " << enc_bits << " bits" << endl;
+      cout << "data src: ";
+      bs_data.Output(cout);
+      cout << endl;
+      cout << "data enc: ";
+      bs_data.Output(cout);
+      cout << endl;
+    }
+    if (res > 5)
+    {
+      if (hamming == 16)
+        bs_flags.push_back(true);
+      else
+        bs_flags.push_back(false);
+      bs_flags.push_back((unsigned long)0, 3);
+      bs_flags.hamming_enc(8);
+      if (verbose)
+      {
+        cout << "flags src: ";
+        bs_flags.Output(cout);
+        cout << endl;
+        cout << "flags enc: ";
+        bs_flags.Output(cout);
+        cout << endl;
+      }
+    }
+  }
+  else
+  {
+    Add6bitStr(&bs_data, data.str);
+    for (res = 7; res < max_marker_res; res += 2)
+    {
+      hamming = 8;
+      enc_bits = UsableDataBits(res, hamming);
+      data_bits = BitsetExt::count_hamming_dec_len(hamming, enc_bits);
+      if (data_bits >= bs_data.Length())
+        break;
+      if (!force_strong_hamming)
+      {
+        hamming = 16;
+        enc_bits = UsableDataBits(res, hamming);
+        data_bits = BitsetExt::count_hamming_dec_len(hamming, enc_bits);
+        if (data_bits >= bs_data.Length())
+          break;
+      }
+    }
+    while (bs_data.Length() < ((data_bits / 6) * 6))
+    {
+      bs_data.push_back((unsigned char)055, 6);  // Add space
+    }
+    while (bs_data.Length() < data_bits)
+    {
+      bs_data.push_back(false);  // Add 0
+    }
+    bs_data.hamming_enc(hamming);
+    if (hamming == 16)
+      bs_flags.push_back(true);
+    else
+      bs_flags.push_back(false);
+    if (content_type == MARKER_CONTENT_TYPE_STRING)
+      bs_flags.push_back((unsigned long)1, 3);
+    else if (content_type == MARKER_CONTENT_TYPE_FILE)
+      bs_flags.push_back((unsigned long)2, 3);
+    else if (content_type == MARKER_CONTENT_TYPE_HTTP)
+      bs_flags.push_back((unsigned long)3, 3);
+    bs_flags.hamming_enc(8);
+    if (verbose)
+    {
+      cout << "Using hamming(" << hamming << ") for " << res << "x" << res
+           << " marker" << endl;
+      cout << bs_data.Length() << " bits are filled into " << data_bits;
+      cout << " bits, and encoded into " << enc_bits << " bits" << endl;
+      cout << "data src: ";
+      bs_data.Output(cout);
+      cout << endl;
+      cout << "data enc: ";
+      bs_data.Output(cout);
+      cout << endl;
+      cout << "flags src: ";
+      bs_flags.Output(cout);
+      cout << endl;
+      cout << "flags enc: ";
+      bs_flags.Output(cout);
+      cout << endl;
+    }
+  }
+
+  // Fill in the marker content
+  deque<bool> bs(bs_flags.GetBits());
+  bs.insert(bs.end(), bs_data.GetBits().begin(), bs_data.GetBits().end());
+  deque<bool>::const_iterator iter = bs.begin();
+  SetMarkerSize(edge_length, res, margin);
+  marker_content = cv::Scalar(255);
+  for (int j = 0; j < res; j++)
+  {
+    for (int i = 0; i < res; i++)
+    {
+      if (j == res / 2)
+      {
+        if (i % 2)
+          marker_content.at<uchar>(j, i) = 0;
+      }
+      else if ((i == res / 2) && (j < res / 2) && (j >= (res / 2) - 2))
+      {
+        marker_content.at<uchar>(j, i) = 0;
+      }
+      else if ((i == res / 2) && (j > res / 2) && (j <= (res / 2) + 2))
+      {
+        marker_content.at<uchar>(j, i) = 255;
+      }
+      else
+      {
+        if (iter != bs.end())
+        {
+          if (*iter)
+            marker_content.at<uchar>(j, i) = 0;
+          iter++;
+        }
+      }
+    }
+  }
 }
-}  // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/MarkerDetector.cpp b/ar_track_alvar/src/MarkerDetector.cpp
index 0052812..4174c8f 100644
--- a/ar_track_alvar/src/MarkerDetector.cpp
+++ b/ar_track_alvar/src/MarkerDetector.cpp
@@ -29,184 +29,219 @@ template class ALVAR_EXPORT alvar::MarkerDetector<alvar::MarkerData>;
 using namespace std;
-namespace alvar {
-  MarkerDetectorImpl::MarkerDetectorImpl() {
-    SetMarkerSize();
-    SetOptions();
-    labeling = NULL;
-  }
-
-  MarkerDetectorImpl::~MarkerDetectorImpl() {
-    if (labeling) delete labeling;
-  }
-
-  void MarkerDetectorImpl::TrackMarkersReset() {
-    _track_markers_clear();
-  }
-
-  void MarkerDetectorImpl::TrackMarkerAdd(int id, PointDouble corners[4]) {
-    Marker *mn = new_M(edge_length, res, margin);
-    if (map_edge_length.find(id) != map_edge_length.end()) {
-      mn->SetMarkerSize(map_edge_length[id], res, margin);
-    }
-
-    mn->SetId(id);
-    mn->marker_corners_img.clear();
-    mn->marker_corners_img.push_back(corners[0]);
-    mn->marker_corners_img.push_back(corners[1]);
-    mn->marker_corners_img.push_back(corners[2]);
-    mn->marker_corners_img.push_back(corners[3]);
-    _track_markers_push_back(mn);
-    delete mn;
-  }
+namespace alvar
+{
+MarkerDetectorImpl::MarkerDetectorImpl()
+{
+  SetMarkerSize();
+  SetOptions();
+  labeling = NULL;
+}
+
+MarkerDetectorImpl::~MarkerDetectorImpl()
+{
+  if (labeling)
+    delete labeling;
+}
 
-  void MarkerDetectorImpl::SetMarkerSize(double _edge_length, int _res, double _margin) {
-    edge_length = _edge_length;
-    res = _res;
-    margin = _margin;
-    map_edge_length.clear(); // TODO: Should we clear these here?
+void MarkerDetectorImpl::TrackMarkersReset()
+{
+  _track_markers_clear();
+}
+
+void MarkerDetectorImpl::TrackMarkerAdd(int id, PointDouble corners[4])
+{
+  Marker* mn = new_M(edge_length, res, margin);
+  if (map_edge_length.find(id) != map_edge_length.end())
+  {
+    mn->SetMarkerSize(map_edge_length[id], res, margin);
   }
 
-  void MarkerDetectorImpl::SetMarkerSizeForId(unsigned long id, double _edge_length) {
-    map_edge_length[id] = _edge_length;
-  }
-
-  void MarkerDetectorImpl::SetOptions(bool _detect_pose_grayscale) {
-    detect_pose_grayscale = _detect_pose_grayscale;
-  }
-
-  int MarkerDetectorImpl::Detect(cv::Mat *image,
-                                 Camera *cam,
-                                 bool track,
-                                 bool visualize,
-                                 double max_new_marker_error,
-                                 double max_track_error,
-                                 LabelingMethod labeling_method,
-                                 bool update_pose)
-  {
-    //assert(image->origin == 0); // Currently only top-left origin supported
-    double error=-1;
-
-    // Swap marker tables
-    _swap_marker_tables();
-    _markers_clear();
-
-    switch(labeling_method)
-    {
-      case CVSEQ :
-
-        if(!labeling)
-          labeling = new LabelingCvSeq();
-        ((LabelingCvSeq*)labeling)->SetOptions(detect_pose_grayscale);
-        break;
-    }
-
-    labeling->SetCamera(cam);
-    labeling->LabelSquares(*image, visualize);
-    vector<vector<PointDouble> >& blob_corners = labeling->blob_corners;
-
-    cv::Mat temp = labeling->gray;
-
-    cv::Mat* gray = &temp;
-
-    int orientation;
-
-    // When tracking we find the best matching blob and test if it is near enough?
- if (track) { - for (size_t ii=0; ii<_track_markers_size(); ii++) { - Marker *mn = _track_markers_at(ii); - if (mn->GetError(Marker::DECODE_ERROR|Marker::MARGIN_ERROR) > 0) continue; // We track only perfectly decoded markers - int track_i=-1; - int track_orientation=0; - double track_error=1e200; - for(unsigned i = 0; i < blob_corners.size()/*blobs_ret.size()*/; ++i) { - if (blob_corners[i].empty()) continue; - mn->CompareCorners(blob_corners[i], &orientation, &error); - if (error < track_error) { - track_i = i; - track_orientation = orientation; - track_error = error; - } - } - if (track_error <= max_track_error) { - mn->SetError(Marker::DECODE_ERROR, 0); - mn->SetError(Marker::MARGIN_ERROR, 0); - mn->SetError(Marker::TRACK_ERROR, track_error); - mn->UpdateContent(blob_corners[track_i], gray, cam); //Maybe should only do this when kinect is being used? Don't think it hurts anything... - mn->UpdatePose(blob_corners[track_i], cam, track_orientation, update_pose); - _markers_push_back(mn); - blob_corners[track_i].clear(); // We don't want to handle this again... - if (visualize) mn->Visualize(image, cam, CV_RGB(255,255,0)); - } - } - } - - // Now we go through the rest of the blobs -- in case there are new markers... - for(size_t i = 0; i < blob_corners.size(); ++i) - { - if (blob_corners[i].empty()) continue; - - Marker *mn = new_M(edge_length, res, margin); - bool ub = mn->UpdateContent(blob_corners[i], gray, cam); - bool db = mn->DecodeContent(&orientation); - if (ub && db && - (mn->GetError(Marker::MARGIN_ERROR | Marker::DECODE_ERROR) <= max_new_marker_error)) - { - if (map_edge_length.find(mn->GetId()) != map_edge_length.end()) { - mn->SetMarkerSize(map_edge_length[mn->GetId()], res, margin); - } - mn->UpdatePose(blob_corners[i], cam, orientation, update_pose); - mn->ros_orientation = orientation; - _markers_push_back(mn); + mn->SetId(id); + mn->marker_corners_img.clear(); + mn->marker_corners_img.push_back(corners[0]); + mn->marker_corners_img.push_back(corners[1]); + mn->marker_corners_img.push_back(corners[2]); + mn->marker_corners_img.push_back(corners[3]); + _track_markers_push_back(mn); + delete mn; +} + +void MarkerDetectorImpl::SetMarkerSize(double _edge_length, int _res, + double _margin) +{ + edge_length = _edge_length; + res = _res; + margin = _margin; + map_edge_length.clear(); // TODO: Should we clear these here? 
+}
+
+void MarkerDetectorImpl::SetMarkerSizeForId(unsigned long id,
+                                            double _edge_length)
+{
+  map_edge_length[id] = _edge_length;
+}
+
+void MarkerDetectorImpl::SetOptions(bool _detect_pose_grayscale)
+{
+  detect_pose_grayscale = _detect_pose_grayscale;
+}
+
+int MarkerDetectorImpl::Detect(cv::Mat& image, Camera* cam, bool track,
+                               bool visualize, double max_new_marker_error,
+                               double max_track_error,
+                               LabelingMethod labeling_method, bool update_pose)
+{
+  double error = -1;
-
+  // Swap marker tables
+  _swap_marker_tables();
+  _markers_clear();
+
+
+  switch (labeling_method)
+  {
+    case CVSEQ:
+
+      if (!labeling)
+        labeling = new LabelingCvSeq();
+      ((LabelingCvSeq*)labeling)->SetOptions(detect_pose_grayscale);
+      break;
+  }
+
+  labeling->SetCamera(cam);
+  labeling->LabelSquares(image, visualize);
+  vector<vector<PointDouble> >& blob_corners = labeling->blob_corners;
+  cv::Mat gray = labeling->gray;
+
+  int orientation;
+
+  // When tracking we find the best matching blob and test if it is near enough?
+  if (track)
+  {
+    for (size_t ii = 0; ii < _track_markers_size(); ii++)
+    {
+
+      Marker* mn = _track_markers_at(ii);
+      if (mn->GetError(Marker::DECODE_ERROR | Marker::MARGIN_ERROR) > 0)
+        continue;  // We track only perfectly decoded markers
+      int track_i = -1;
+      int track_orientation = 0;
+      double track_error = 1e200;
+      for (unsigned i = 0; i < blob_corners.size() /*blobs_ret.size()*/; ++i)
+      {
+        if (blob_corners[i].empty())
+          continue;
+        mn->CompareCorners(blob_corners[i], &orientation, &error);
+        if (error < track_error)
+        {
+          track_i = i;
+          track_orientation = orientation;
+          track_error = error;
+        }
+      }
+      if (track_error <= max_track_error)
+      {
+        mn->SetError(Marker::DECODE_ERROR, 0);
+        mn->SetError(Marker::MARGIN_ERROR, 0);
+        mn->SetError(Marker::TRACK_ERROR, track_error);
+        mn->UpdateContent(blob_corners[track_i], gray,
+                          cam);  // Maybe should only do this when kinect is
+                                 // being used? Don't think it hurts anything...
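+        // A tracked marker keeps its previously decoded id; only its pose
+        // and error terms are refreshed from the matched blob corners.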
+        mn->UpdatePose(blob_corners[track_i], cam, track_orientation,
+                       update_pose);
+        _markers_push_back(mn);
+        blob_corners[track_i].clear();  // We don't want to handle this again...
+        if (visualize)
+          mn->Visualize(image, cam, CV_RGB(255, 255, 0));
+      }
+    }
+  }
+
+
+  // Now we go through the rest of the blobs -- in case there are new markers...
+  for (size_t i = 0; i < blob_corners.size(); ++i)
+  {
+
+    if (blob_corners[i].empty())
+    {
+      continue;
+    }
+
+    Marker* mn = new_M(edge_length, res, margin);
+    bool ub = mn->UpdateContent(blob_corners[i], gray, cam);
+    bool db = mn->DecodeContent(&orientation);
+    if (ub && db &&
+        (mn->GetError(Marker::MARGIN_ERROR | Marker::DECODE_ERROR) <=
+         max_new_marker_error))
+    {
+      if (map_edge_length.find(mn->GetId()) != map_edge_length.end())
+      {
+        mn->SetMarkerSize(map_edge_length[mn->GetId()], res, margin);
+      }
+      mn->UpdatePose(blob_corners[i], cam, orientation, update_pose);
+      mn->ros_orientation = orientation;
+      _markers_push_back(mn);
+
+      if (visualize)
+        mn->Visualize(image, cam, CV_RGB(255, 0, 0));
+    }
+    delete mn;
+  }
+
+  int val = (int)_markers_size();
+
+  return val;
+}
+
+int MarkerDetectorImpl::DetectAdditional(cv::Mat& image, Camera* cam,
+                                         bool visualize, double max_track_error)
+{
+  if (!labeling)
+    return -1;
+  double error = -1;
+  int orientation;
+  int count = 0;
+  vector<vector<PointDouble> >& blob_corners = labeling->blob_corners;
+
+  for (size_t ii = 0; ii < _track_markers_size(); ii++)
+  {
+    Marker* mn = _track_markers_at(ii);
+    if (mn->GetError(Marker::DECODE_ERROR | Marker::MARGIN_ERROR) > 0)
+      continue;  // We track only perfectly decoded markers
+    int track_i = -1;
+    int track_orientation = 0;
+    double track_error = 1e200;
+    for (unsigned i = 0; i < blob_corners.size(); ++i)
+    {
+      if (blob_corners[i].empty())
+        continue;
+      mn->CompareCorners(blob_corners[i], &orientation, &error);
+      if (error < track_error)
+      {
+        track_i = i;
+        track_orientation = orientation;
+        track_error = error;
+      }
+    }
+    if (track_error <= max_track_error)
+    {
+      mn->SetError(Marker::DECODE_ERROR, 0);
+      mn->SetError(Marker::MARGIN_ERROR, 0);
+      mn->SetError(Marker::TRACK_ERROR, track_error);
+      mn->UpdatePose(blob_corners[track_i], cam, track_orientation);
+      _markers_push_back(mn);
+      count++;
+      blob_corners[track_i].clear();  // We don't want to handle this again...
+ + if (visualize) + { + mn->Visualize(image, cam, CV_RGB(0, 255, 255)); + } + } + } + return count; } + +} // namespace alvar diff --git a/ar_track_alvar/src/MultiMarker.cpp b/ar_track_alvar/src/MultiMarker.cpp index dbd275f..ca9a1c1 100644 --- a/ar_track_alvar/src/MultiMarker.cpp +++ b/ar_track_alvar/src/MultiMarker.cpp @@ -28,356 +28,409 @@ using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; -int MultiMarker::pointcloud_index(int marker_id, int marker_corner, bool add_if_missing /*=false*/) { - return (get_id_index(marker_id ,add_if_missing)*4)+marker_corner; +int MultiMarker::pointcloud_index(int marker_id, int marker_corner, + bool add_if_missing /*=false*/) +{ + return (get_id_index(marker_id, add_if_missing) * 4) + marker_corner; } int MultiMarker::get_id_index(int id, bool add_if_missing /*=false*/) { - for(size_t i = 0; i < marker_indices.size(); ++i) { - if(marker_indices.at(i) == id) - return (int) i; - } - if (!add_if_missing) return -1; - marker_indices.push_back(id); - marker_status.push_back(0); - return (marker_indices.size()-1); + for (size_t i = 0; i < marker_indices.size(); ++i) + { + if (marker_indices.at(i) == id) + return (int)i; + } + if (!add_if_missing) + return -1; + marker_indices.push_back(id); + marker_status.push_back(0); + return (marker_indices.size() - 1); } void MultiMarker::Reset() { - fill(marker_status.begin(), marker_status.end(), 0); - pointcloud.clear(); + fill(marker_status.begin(), marker_status.end(), 0); + pointcloud.clear(); } -bool MultiMarker::SaveXML(const char* fname) { - tinyxml2::XMLDocument document; - tinyxml2::XMLDeclaration* declaration=document.NewDeclaration(); - document.InsertFirstChild(declaration); - tinyxml2::XMLElement * temp; - temp->SetName("multimarker"); - document.LinkEndChild(temp); - tinyxml2::XMLElement *xml_root = document.RootElement(); - - int n_markers = marker_indices.size(); - xml_root->SetAttribute("markers", n_markers); - - for(int i = 0; i < n_markers; ++i) { - tinyxml2::XMLElement *xml_marker; - xml_marker->SetName("marker"); - - - - xml_root->LinkEndChild(xml_marker); - - xml_marker->SetAttribute("index", marker_indices[i]); - xml_marker->SetAttribute("status", marker_status[i]); - - for(int j = 0; j < 4; ++j) { - tinyxml2::XMLElement *xml_corner; - xml_corner->SetName("corner"); - xml_marker->LinkEndChild(xml_corner); - cv::Point3f X = pointcloud[pointcloud_index(marker_indices[i], j)]; - xml_corner->DoubleAttribute("x", X.x); - xml_corner->DoubleAttribute("y", X.y); - xml_corner->DoubleAttribute("z", X.z); - } - } - return document.SaveFile(fname); +bool MultiMarker::SaveXML(const char* fname) +{ + TiXmlDocument document; + document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no")); + document.LinkEndChild(new TiXmlElement("multimarker")); + TiXmlElement* xml_root = document.RootElement(); + + int n_markers = marker_indices.size(); + xml_root->SetAttribute("markers", n_markers); + + for (int i = 0; i < n_markers; ++i) + { + TiXmlElement* xml_marker = new TiXmlElement("marker"); + xml_root->LinkEndChild(xml_marker); + + xml_marker->SetAttribute("index", marker_indices[i]); + xml_marker->SetAttribute("status", marker_status[i]); + + for (int j = 0; j < 4; ++j) + { + TiXmlElement* xml_corner = new TiXmlElement("corner"); + xml_marker->LinkEndChild(xml_corner); + cv::Point3d X = pointcloud[pointcloud_index(marker_indices[i], j)]; + xml_corner->SetDoubleAttribute("x", X.x); + xml_corner->SetDoubleAttribute("y", X.y); + xml_corner->SetDoubleAttribute("z", X.z); + } + } + return 
document.SaveFile(fname); } -bool MultiMarker::SaveText(const char* fname) { - size_t n_markers = marker_indices.size(); - - fstream file_op(fname, ios::out); +bool MultiMarker::SaveText(const char* fname) +{ + size_t n_markers = marker_indices.size(); - file_op<QueryIntAttribute("markers", &n_markers) != EXIT_SUCCESS) return false; - - pointcloud.clear(); - marker_indices.resize(n_markers); - marker_status.resize(n_markers); - - tinyxml2::XMLElement *xml_marker = xml_root->FirstChildElement("marker"); - for(int i = 0; i < n_markers; ++i) { - if (!xml_marker) return false; - - int index, status; - if (xml_marker->QueryIntAttribute("index", &index) != EXIT_SUCCESS) return false; - if (xml_marker->QueryIntAttribute("status", &status) != EXIT_SUCCESS) return false; - marker_indices[i] = index; - marker_status[i] = status; - if(i==0) master_id = index; - - tinyxml2::XMLElement *xml_corner = xml_marker->FirstChildElement("corner"); - for(int j = 0; j < 4; ++j) { - if (!xml_corner) return false; - - cv::Point3f X; - if (xml_corner->QueryDoubleAttribute("x", (double *)&X.x) != EXIT_SUCCESS) return false; - if (xml_corner->QueryDoubleAttribute("y", (double *)&X.y) != EXIT_SUCCESS) return false; - if (xml_corner->QueryDoubleAttribute("z", (double *)&X.z) != EXIT_SUCCESS) return false; - pointcloud[pointcloud_index(marker_indices[i], j)] = X; - - xml_corner = (tinyxml2::XMLElement*)xml_corner->NextSiblingElement("corner"); - } - - xml_marker = (tinyxml2::XMLElement*)xml_marker->NextSiblingElement("marker"); - } - return true; +bool MultiMarker::Save(const char* fname, FILE_FORMAT format) +{ + switch (format) + { + case FILE_FORMAT_XML: + return SaveXML(fname); + case FILE_FORMAT_TEXT: + case FILE_FORMAT_DEFAULT: + return SaveText(fname); + default: + return false; + } } -bool MultiMarker::LoadText(const char* fname) { - fstream file_op(fname, ios::in); - - if (!file_op) { +bool MultiMarker::LoadXML(const char* fname) +{ + TiXmlDocument document; + if (!document.LoadFile(fname)) + return false; + TiXmlElement* xml_root = document.RootElement(); + + int n_markers; + if (xml_root->QueryIntAttribute("markers", &n_markers) != TIXML_SUCCESS) + return false; + + pointcloud.clear(); + marker_indices.resize(n_markers); + marker_status.resize(n_markers); + + TiXmlElement* xml_marker = xml_root->FirstChildElement("marker"); + for (int i = 0; i < n_markers; ++i) + { + if (!xml_marker) + return false; + + int index, status; + if (xml_marker->QueryIntAttribute("index", &index) != TIXML_SUCCESS) + return false; + if (xml_marker->QueryIntAttribute("status", &status) != TIXML_SUCCESS) + return false; + marker_indices[i] = index; + marker_status[i] = status; + if (i == 0) + master_id = index; + + TiXmlElement* xml_corner = xml_marker->FirstChildElement("corner"); + for (int j = 0; j < 4; ++j) + { + if (!xml_corner) return false; - } - - size_t n_markers; - file_op>>n_markers; - pointcloud.clear(); - marker_indices.resize(n_markers); - marker_status.resize(n_markers); + cv::Point3d X; + if (xml_corner->QueryDoubleAttribute("x", &X.x) != TIXML_SUCCESS) + return false; + if (xml_corner->QueryDoubleAttribute("y", &X.y) != TIXML_SUCCESS) + return false; + if (xml_corner->QueryDoubleAttribute("z", &X.z) != TIXML_SUCCESS) + return false; + pointcloud[pointcloud_index(marker_indices[i], j)] = X; - for(size_t i = 0; i < n_markers; ++i){ - file_op>>marker_indices[i]; - } + xml_corner = (TiXmlElement*)xml_corner->NextSibling("corner"); + } - for(size_t i = 0; i < n_markers; ++i){ - file_op>>marker_status[i]; - } + xml_marker = 
(TiXmlElement*)xml_marker->NextSibling("marker"); + } + return true; +} - for(size_t i = 0; i < n_markers; ++i) - for(size_t j = 0; j < 4; ++j) - { - cv::Point3f X; - file_op>>X.x; - file_op>>X.y; - file_op>>X.z; - pointcloud[pointcloud_index(marker_indices[i], j)] = X; - } +bool MultiMarker::LoadText(const char* fname) +{ + fstream file_op(fname, ios::in); + + if (!file_op) + { + return false; + } + + size_t n_markers; + file_op >> n_markers; + + pointcloud.clear(); + marker_indices.resize(n_markers); + marker_status.resize(n_markers); + + for (size_t i = 0; i < n_markers; ++i) + { + file_op >> marker_indices[i]; + } + + for (size_t i = 0; i < n_markers; ++i) + { + file_op >> marker_status[i]; + } + + for (size_t i = 0; i < n_markers; ++i) + for (size_t j = 0; j < 4; ++j) + { + cv::Point3d X; + file_op >> X.x; + file_op >> X.y; + file_op >> X.z; + pointcloud[pointcloud_index(marker_indices[i], j)] = X; + } - file_op.close(); + file_op.close(); - return true; + return true; } -bool MultiMarker::Load(const char* fname, FILE_FORMAT format) { - switch (format) { - case FILE_FORMAT_XML: - return LoadXML(fname); - case FILE_FORMAT_TEXT: - case FILE_FORMAT_DEFAULT: - return LoadText(fname); - default: - return false; - } +bool MultiMarker::Load(const char* fname, FILE_FORMAT format) +{ + switch (format) + { + case FILE_FORMAT_XML: + return LoadXML(fname); + case FILE_FORMAT_TEXT: + case FILE_FORMAT_DEFAULT: + return LoadText(fname); + default: + return false; + } } MultiMarker::MultiMarker(vector& indices) { - marker_indices.resize(indices.size()); - copy(indices.begin(), indices.end(), marker_indices.begin()); - marker_status.resize(indices.size()); - fill(marker_status.begin(), marker_status.end(), 0); + marker_indices.resize(indices.size()); + copy(indices.begin(), indices.end(), marker_indices.begin()); + marker_status.resize(indices.size()); + fill(marker_status.begin(), marker_status.end(), 0); } -void MultiMarker::PointCloudReset() { - pointcloud.clear(); +void MultiMarker::PointCloudReset() +{ + pointcloud.clear(); } -void MultiMarker::PointCloudCorners3d(double edge_length, Pose &pose, cv::Point3f corners[4]) { - // Transformation from origin to current marker - CvMat *m3 = cvCreateMat(4,4,CV_64F); cvSetIdentity(m3); - pose.GetMatrix(m3); - - for(size_t j = 0; j < 4; ++j) - { - // TODO: This should be exactly the same as in Marker class. - // Should we get the values from there somehow? - double X_data[4] = {0, 0, 0, 1}; - if (j == 0) { - int zzzz=2; - //X_data[0] = -0.5*edge_length; - //X_data[1] = -0.5*edge_length; - } else if (j == 1) { - X_data[0] = +0.5*edge_length; - X_data[1] = -0.5*edge_length; - } else if (j == 2) { - X_data[0] = +0.5*edge_length; - X_data[1] = +0.5*edge_length; - } else if (j == 3) { - X_data[0] = -0.5*edge_length; - X_data[1] = +0.5*edge_length; - } - - CvMat X = cvMat(4, 1, CV_64F, X_data); - cvMatMul(m3, &X, &X); - - corners[j].x = X_data[0] / X_data[3]; - corners[j].y = X_data[1] / X_data[3]; - corners[j].z = X_data[2] / X_data[3]; - } - cvReleaseMat(&m3); +void MultiMarker::PointCloudCorners3d(double edge_length, Pose& pose, + cv::Point3d corners[4]) +{ + // Transformation from origin to current marker + cv::Mat m3 = cv::Mat::eye(4, 4, CV_64F); + pose.GetMatrix(m3); + + for (size_t j = 0; j < 4; ++j) + { + // TODO: This should be exactly the same as in Marker class. + // Should we get the values from there somehow? 
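+    // Note: corner 0 is left at the marker origin here; the symmetric
+    // (-0.5 * edge_length) offsets below are commented out in the code.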
+ double X_data[4] = { 0, 0, 0, 1 }; + if (j == 0) + { + int zzzz = 2; + // X_data[0] = -0.5*edge_length; + // X_data[1] = -0.5*edge_length; + } + else if (j == 1) + { + X_data[0] = +0.5 * edge_length; + X_data[1] = -0.5 * edge_length; + } + else if (j == 2) + { + X_data[0] = +0.5 * edge_length; + X_data[1] = +0.5 * edge_length; + } + else if (j == 3) + { + X_data[0] = -0.5 * edge_length; + X_data[1] = +0.5 * edge_length; + } + + cv::Mat X = cv::Mat(4, 1, CV_64F, X_data); + X = m3 * X; + + corners[j].x = X_data[0] / X_data[3]; + corners[j].y = X_data[1] / X_data[3]; + corners[j].z = X_data[2] / X_data[3]; + } + m3.release(); } -void MultiMarker::PointCloudAdd(int marker_id, double edge_length, Pose &pose) { - cv::Point3f corners[4]; - PointCloudCorners3d(edge_length, pose, corners); - for(size_t j = 0; j < 4; ++j) { - pointcloud[pointcloud_index(marker_id, j, true)] = corners[j]; - marker_status[get_id_index(marker_id, true)]=1; - } +void MultiMarker::PointCloudAdd(int marker_id, double edge_length, Pose& pose) +{ + cv::Point3d corners[4]; + PointCloudCorners3d(edge_length, pose, corners); + for (size_t j = 0; j < 4; ++j) + { + pointcloud[pointcloud_index(marker_id, j, true)] = corners[j]; + marker_status[get_id_index(marker_id, true)] = 1; + } } -void MultiMarker::PointCloudCopy(const MultiMarker *m) { - pointcloud.clear(); - pointcloud = m->pointcloud; // TODO: Is this copy operation ok? - marker_indices.resize(m->marker_indices.size()); - marker_status.resize(m->marker_status.size()); - copy(m->marker_indices.begin(), m->marker_indices.end(), marker_indices.begin()); - copy(m->marker_status.begin(), m->marker_status.end(), marker_status.begin()); +void MultiMarker::PointCloudCopy(const MultiMarker* m) +{ + pointcloud.clear(); + pointcloud = m->pointcloud; // TODO: Is this copy operation ok? 
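+  // Assuming pointcloud is a std::map, the assignment above performs an
+  // element-wise deep copy, so the TODO should be safe to resolve.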
+  marker_indices.resize(m->marker_indices.size());
+  marker_status.resize(m->marker_status.size());
+  copy(m->marker_indices.begin(), m->marker_indices.end(),
+       marker_indices.begin());
+  copy(m->marker_status.begin(), m->marker_status.end(), marker_status.begin());
 }
-void MultiMarker::PointCloudGet(int marker_id, int point,
-  double &x, double &y, double &z) {
-  cv::Point3f p3d = pointcloud[pointcloud_index(marker_id, point)];
+void MultiMarker::PointCloudGet(int marker_id, int point, double& x, double& y,
+                                double& z)
+{
+  cv::Point3d p3d = pointcloud[pointcloud_index(marker_id, point)];
   x = p3d.x;
   y = p3d.y;
   z = p3d.z;
 }
-bool MultiMarker::IsValidMarker(int marker_id) {
-  int idx = get_id_index(marker_id);
-  return idx != -1 && marker_status[idx] != 0;
+bool MultiMarker::IsValidMarker(int marker_id)
+{
+  int idx = get_id_index(marker_id);
+  return idx != -1 && marker_status[idx] != 0;
 }
-
-double MultiMarker::_GetPose(MarkerIterator &begin, MarkerIterator &end, Camera* cam, Pose& pose, cv::Mat* image) {
-  vector<cv::Point3f> world_points;
-  vector<PointDouble> image_points;
-
-  // Reset the marker_status to 1 for all markers in point_cloud
-  for (size_t i=0; i<marker_status.size(); i++) {
-    if (marker_status[i] > 0) marker_status[i]=1;
-  }
-
-  // For every detected marker
-  for (MarkerIterator &i = begin.reset(); i != end; ++i)
-  {
-    const Marker* marker = *i;
-    int id = marker->GetId();
-    int index = get_id_index(id);
-    if (index < 0) continue;
-
-    // But only if we have corresponding points in the pointcloud
-    if (marker_status[index] > 0) {
-      for(size_t j = 0; j < marker->marker_corners.size(); ++j)
-      {
-        cv::Point3f Xnew = pointcloud[pointcloud_index(id, (int)j)];
-        world_points.push_back(Xnew);
-        image_points.push_back(marker->marker_corners_img.at(j));
-        if (image) cv::circle(*image, cvPoint(int(marker->marker_corners_img[j].x), int(marker->marker_corners_img[j].y)), 3, cv::Scalar(0,255,0));
-      }
-      marker_status[index] = 2; // Used for tracking
-    }
-  }
-
-  if (world_points.size() < 4) return -1;
-
-  double rod[3], tra[3];
-  CvMat rot_mat = cvMat(3, 1,CV_64F, rod);
-  CvMat tra_mat = cvMat(3, 1,CV_64F, tra);
-  double error=0; // TODO: Now we don't calculate any error value
-  cam->CalcExteriorOrientation(world_points, image_points, &rot_mat, &tra_mat);
-  pose.SetRodriques(&rot_mat);
-  pose.SetTranslation(&tra_mat);
-  return error;
+double MultiMarker::_GetPose(MarkerIterator& begin, MarkerIterator& end,
+                             Camera* cam, Pose& pose, cv::Mat& image)
+{
+  vector<cv::Point3d> world_points;
+  vector<PointDouble> image_points;
+
+  // Reset the marker_status to 1 for all markers in point_cloud
+  for (size_t i = 0; i < marker_status.size(); i++)
+  {
+    if (marker_status[i] > 0)
+      marker_status[i] = 1;
+  }
+
+  // For every detected marker
+  for (MarkerIterator& i = begin.reset(); i != end; ++i)
+  {
+    const Marker* marker = *i;
+    int id = marker->GetId();
+    int index = get_id_index(id);
+    if (index < 0)
+      continue;
+
+    // But only if we have corresponding points in the pointcloud
+    if (marker_status[index] > 0)
+    {
+      for (size_t j = 0; j < marker->marker_corners.size(); ++j)
+      {
+        world_points.push_back(pointcloud[pointcloud_index(id, (int)j)]);
+        image_points.push_back(marker->marker_corners_img.at(j));
+        if (!image.empty())
+          cv::circle(image,
+                     cv::Point(int(marker->marker_corners_img[j].x),
+                               int(marker->marker_corners_img[j].y)),
+                     3, CV_RGB(0, 255, 0));
+      }
+      marker_status[index] = 2;  // Used for tracking
+    }
+  }
+
+  if (world_points.size() < 4)
+    return -1;
+
+  cv::Mat rot_mat = cv::Mat(3, 1, CV_64F);
+  cv::Mat tra_mat = cv::Mat(3, 1, CV_64F);
+  double error = 0;  // TODO: Now we don't calculate any error value
+  cam->CalcExteriorOrientation(world_points, image_points, rot_mat, tra_mat);
+  pose.SetRodriques(rot_mat);
+  pose.SetTranslation(tra_mat);
+  return error;
 }
-
-int MultiMarker::_SetTrackMarkers(MarkerDetectorImpl &marker_detector, Camera* cam, Pose& pose, cv::Mat *image) {
-  int count=0;
-  marker_detector.TrackMarkersReset();
-  for(size_t i = 0; i < marker_indices.size(); ++i) {
-    int id = marker_indices[i];
-    // If the marker wasn't tracked lets add it to be trackable
-    if (marker_status[i] == 1) {
-      vector<cv::Point3f> pw(4);
-      pw[0] = pointcloud[pointcloud_index(id, 0)];
-      pw[1] = pointcloud[pointcloud_index(id, 1)];
-      pw[2] = pointcloud[pointcloud_index(id, 2)];
-      pw[3] = pointcloud[pointcloud_index(id, 3)];
-      vector<cv::Point2f> pi(4);
-      cam->ProjectPoints(pw, &pose, pi);
-      PointDouble p[4]; // TODO: This type copying is so silly!!!
-      p[0].x = pi[0].x;
-      p[0].y = pi[0].y;
-      p[1].x = pi[1].x;
-      p[1].y = pi[1].y;
-      p[2].x = pi[2].x;
-      p[2].y = pi[2].y;
-      p[3].x = pi[3].x;
-      p[3].y = pi[3].y;
-      if (image) {
-        cv::line(*image, cvPoint(int(p[0].x), int(p[0].y)), cvPoint(int(p[1].x), int(p[1].y)), cv::Scalar(255,0,0));
-        cv::line(*image, cvPoint(int(p[1].x), int(p[1].y)), cvPoint(int(p[2].x), int(p[2].y)), cv::Scalar(255,0,0));
-        cv::line(*image, cvPoint(int(p[2].x), int(p[2].y)), cvPoint(int(p[3].x), int(p[3].y)), cv::Scalar(255,0,0));
-        cv::line(*image, cvPoint(int(p[3].x), int(p[3].y)), cvPoint(int(p[0].x), int(p[0].y)), cv::Scalar(255,0,0));
-      }
-      marker_detector.TrackMarkerAdd(id, p);
-      count++;
-    }
-  }
-  return count;
+int MultiMarker::_SetTrackMarkers(MarkerDetectorImpl& marker_detector,
+                                  Camera* cam, Pose& pose, cv::Mat& image)
+{
+  int count = 0;
+  marker_detector.TrackMarkersReset();
+  for (size_t i = 0; i < marker_indices.size(); ++i)
+  {
+    int id = marker_indices[i];
+    // If the marker wasn't tracked, let's add it to be trackable
+    if (marker_status[i] == 1)
+    {
+      vector<cv::Point3d> pw(4);
+      pw[0] = pointcloud[pointcloud_index(id, 0)];
+      pw[1] = pointcloud[pointcloud_index(id, 1)];
+      pw[2] = pointcloud[pointcloud_index(id, 2)];
+      pw[3] = pointcloud[pointcloud_index(id, 3)];
+      vector<cv::Point2d> pi(4);
+      cam->ProjectPoints(pw, &pose, pi);
+      PointDouble p[4];  // TODO: This type copying is so silly!!!
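+      // Project the four stored 3D corners back into the image so the
+      // detector can seed tracking from them.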
+      p[0].x = pi[0].x;
+      p[0].y = pi[0].y;
+      p[1].x = pi[1].x;
+      p[1].y = pi[1].y;
+      p[2].x = pi[2].x;
+      p[2].y = pi[2].y;
+      p[3].x = pi[3].x;
+      p[3].y = pi[3].y;
+      if (!image.empty())
+      {
+        cv::line(image, cv::Point(int(p[0].x), int(p[0].y)),
+                 cv::Point(int(p[1].x), int(p[1].y)), CV_RGB(255, 0, 0));
+        cv::line(image, cv::Point(int(p[1].x), int(p[1].y)),
+                 cv::Point(int(p[2].x), int(p[2].y)), CV_RGB(255, 0, 0));
+        cv::line(image, cv::Point(int(p[2].x), int(p[2].y)),
+                 cv::Point(int(p[3].x), int(p[3].y)), CV_RGB(255, 0, 0));
+        cv::line(image, cv::Point(int(p[3].x), int(p[3].y)),
+                 cv::Point(int(p[0].x), int(p[0].y)), CV_RGB(255, 0, 0));
+      }
+      marker_detector.TrackMarkerAdd(id, p);
+      count++;
+    }
+  }
+  return count;
 }
-}
+}  // namespace alvar
diff --git a/ar_track_alvar/src/MultiMarkerBundle.cpp b/ar_track_alvar/src/MultiMarkerBundle.cpp
index 7e9103b..2e5f563 100644
--- a/ar_track_alvar/src/MultiMarkerBundle.cpp
+++ b/ar_track_alvar/src/MultiMarkerBundle.cpp
@@ -22,250 +22,291 @@
  */
 #include "ar_track_alvar/MultiMarkerBundle.h"
+#include <opencv2/calib3d.hpp>
 
 using namespace std;
 
-namespace alvar {
+namespace alvar
+{
 using namespace std;
 
-MultiMarkerBundle::MultiMarkerBundle(std::vector<int>& indices)
-  : MultiMarker(indices)
+MultiMarkerBundle::MultiMarkerBundle(std::vector<int>& indices)
+  : MultiMarker(indices)
 {
-  MeasurementsReset();
+  MeasurementsReset();
 }
 
 MultiMarkerBundle::~MultiMarkerBundle()
 {
 }
 
-void MultiMarkerBundle::MeasurementsReset() {
-  optimization_error=-1;
-  optimization_keyframes=0;
-  optimization_markers=0;
-  optimizing=false;
-  camera_poses.clear();
-  measurements.clear();
+void MultiMarkerBundle::MeasurementsReset()
+{
+  optimization_error = -1;
+  optimization_keyframes = 0;
+  optimization_markers = 0;
+  optimizing = false;
+  camera_poses.clear();
+  measurements.clear();
 }
 
-int n_images; // TODO: This should not be global (use the param instead)
-int n_markers; // TODO: This should not be global (use the param instead)
-Camera *camera; // TODO: This should not be global (use the param instead)
+int n_images;    // TODO: This should not be global (use the param instead)
+int n_markers;   // TODO: This should not be global (use the param instead)
+Camera* camera;  // TODO: This should not be global (use the param instead)
 
-void Est(CvMat* state, CvMat* estimation, void *param)
+void Est(cv::Mat& state, cv::Mat& estimation, void* param)
 {
+  // State: cam1, cam2, cam3, cam4, ..., X1(x,y,z), X2, X3, ...
+  // Estimation: (u11,v11), (u)
 
-  // State: cam1, cam2, cam3, cam4, ..., X1(x,y,z), X2, X3, ...
-  // Estimation: (u11,v11), (u)
-
-  // For every image observation (camera)...
+  for (int i = 0; i < n_images; ++i)
+  {
+    // Get camera from state
+    Pose p;
+    p.SetQuaternion(&state.at<double>(i * 7 + 3));
 
-    double tra[3];
-    double rodr[3];
-    CvMat mat_translation_vector = cvMat(3, 1, CV_64F, tra);
-    CvMat mat_rotation_vector = cvMat(3, 1, CV_64F, rodr);
+    double tra[3];
+    double rodr[3];
+    cv::Mat mat_translation_vector = cv::Mat(3, 1, CV_64F, tra);
+    cv::Mat mat_rotation_vector = cv::Mat(3, 1, CV_64F, rodr);
 
-    memcpy(tra, &(state->data.db[i*7]), 3*sizeof(double));
-    p.GetRodriques(&mat_rotation_vector);
+    memcpy(tra, &state.at<double>(i * 7), 3 * sizeof(double));
+    p.GetRodriques(mat_rotation_vector);
 
-    // For every point in marker field
-    int n_points = n_markers*4;
-    for(int j = 0; j < n_points; ++j)
-    {
-      int index = n_images*7 + 3*j;
+    // For every point in marker field
+    int n_points = n_markers * 4;
+    for (int j = 0; j < n_points; ++j)
+    {
+      int index = n_images * 7 + 3 * j;
 
-      double object_points[3] = {state->data.db[index+0],
-                                 state->data.db[index+1],
-                                 state->data.db[index+2]};
+      double object_points[3] = { state.at<double>(index + 0),
+                                  state.at<double>(index + 1),
+                                  state.at<double>(index + 2) };
+      cv::Mat mat_object_points;
+      mat_object_points = cv::Mat(1, 1, CV_64FC3, object_points);
 
-      CvMat mat_object_points;
-      cvInitMatHeader(&mat_object_points, 1, 1, CV_64FC3, object_points);
+      double proj[2] = { 0 };
+      cv::Mat mat_proj = cv::Mat(1, 1, CV_64FC2, proj);
 
-      double proj[2]={0};
-      CvMat mat_proj = cvMat(1, 1, CV_64FC2, proj);
+      cv::projectPoints(mat_object_points, mat_rotation_vector,
+                        mat_translation_vector, camera->calib_K,
+                        camera->calib_D, mat_proj);
 
-      cv::projectPoints(cv::cvarrToMat(&mat_object_points), cv::cvarrToMat(&mat_rotation_vector),cv::cvarrToMat(&mat_translation_vector), cv::cvarrToMat(&(camera->calib_K)),cv::cvarrToMat(&(camera->calib_D)), cv::cvarrToMat(&mat_proj));
-
-      index = i*n_points*2 + j*2;
-      estimation->data.db[index+0] = proj[0];
-      estimation->data.db[index+1] = proj[1];
-    }
-  }
+      index = i * n_points * 2 + j * 2;
+      estimation.at<double>(index + 0) = proj[0];
+      estimation.at<double>(index + 1) = proj[1];
+    }
+  }
 }
 
-bool MultiMarkerBundle::Optimize(Camera *_cam, double stop, int max_iter, Optimization::OptimizeMethod method)
+bool MultiMarkerBundle::Optimize(Camera* _cam, double stop, int max_iter,
+                                 Optimization::OptimizeMethod method)
 {
-  // Est() needs these
-  // TODO: Better way to deliver them??? Other than using global variables???
-  camera = _cam;
-  n_images = camera_poses.size();
-  n_markers = marker_indices.size();
+  // Est() needs these
+  // TODO: Better way to deliver them??? Other than using global variables???
+  camera = _cam;
+  n_images = camera_poses.size();
+  n_markers = marker_indices.size();
+
+  if (n_images < 1)
+  {
+    cout << "Too few images! At least 1 images needed." << endl;
+    return false;
+  }
 
-  if(n_images < 1)
-  {
-    cout<<"Too few images! At least 1 images needed."< 0) {
-      cvmSet(parameters_mat, index+0, 0, pointcloud[pointcloud_index(id,j)].x);
-      cvmSet(parameters_mat, index+1, 0, pointcloud[pointcloud_index(id,j)].y);
-      cvmSet(parameters_mat, index+2, 0, pointcloud[pointcloud_index(id,j)].z);
-    } else {
-      // We don't optimize known-initialized parameters?
-      cvSet2D(parameters_mask_mat, index+0, 0, cvScalar(0));
-      cvSet2D(parameters_mask_mat, index+1, 0, cvScalar(0));
-      cvSet2D(parameters_mask_mat, index+2, 0, cvScalar(0));
-    }
-  }
-  }
-  // Fill in the frame data. Camera poses into parameters and corners into measurements
-  int n_measurements = 0; // number of actual measurement data points.
- for (size_t f=0; f < frames; f++) { - //cout<<"frame "<data.db[f*7+0])); - CvMat qua = cvMat(4, 1, CV_64F, &(parameters_mat->data.db[f*7+3])); - camera_poses[f].GetTranslation(&tra); - camera_poses[f].GetQuaternion(&qua); - // Measurements - for(size_t i = 0; i < marker_indices.size(); ++i) { - int id = marker_indices[i]; - if (measurements.find(measurements_index(f,id,0)) != measurements.end()) { - for (int j=0; j<4; j++) { - //cout< 0) optimization_markers++; - Optimization optimization(n_params, n_meas); - cout<<"Optimizing with "< 3) && (optimization_error > stop)) { - CvMat *err = optimization.GetErr(); - int max_k=-1; - double max=0; - for (int k=0; kheight; k++) { - if (cvmGet(err, k, 0) > max) { - max = cvmGet(err, k, 0); - max_k = k; - } - } - if (max_k >= 0) { - // We remove all 8 corner measurements - int f = (max_k - (max_k % (n_markers*4*2))) / (n_markers*4*2); - max_k -= f*(n_markers*4*2); - int id = (max_k - (max_k % (4*2))) / (4*2); - cout<<"Optimization error over the threshold -- remove the biggest outlier: "; - cout<<"frame "<(index + 0, 0) = 0; + parameters_mask_mat.at(index + 1, 0) = 0; + parameters_mask_mat.at(index + 2, 0) = 0; + } + if (marker_status[i] > 0) + { + parameters_mat.at(index + 0, 0) = + pointcloud[pointcloud_index(id, j)].x; + parameters_mat.at(index + 1, 0) = + pointcloud[pointcloud_index(id, j)].y; + parameters_mat.at(index + 2, 0) = + pointcloud[pointcloud_index(id, j)].z; + } + else + { + // We don't optimize known-initialized parameters? + parameters_mask_mat.at(index + 0, 0) = 0; + parameters_mask_mat.at(index + 1, 0) = 0; + parameters_mask_mat.at(index + 2, 0) = 0; + } + } + } + // Fill in the frame data. Camera poses into parameters and corners into + // measurements + int n_measurements = 0; // number of actual measurement data points. 
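+  // Parameter layout: 7 values per keyframe pose (3 translation + 4
+  // quaternion), followed by 3 values per marker corner, as used in Est().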
+ for (size_t f = 0; f < frames; f++) + { + // cout<<"frame "<(f * 7 + 0))); + cv::Mat qua = + cv::Mat(4, 1, CV_64F, &(parameters_mat.at(f * 7 + 3))); + camera_poses[f].GetTranslation(tra); + camera_poses[f].GetQuaternion(qua); + // Measurements + for (size_t i = 0; i < marker_indices.size(); ++i) + { + int id = marker_indices[i]; + if (measurements.find(measurements_index(f, id, 0)) != measurements.end()) + { + for (int j = 0; j < 4; j++) + { + // cout<(index + 0, 0) = + measurements[measurements_index(f, id, j)].x; + measurements_mat.at(index + 1, 0) = + measurements[measurements_index(f, id, j)].y; + n_measurements += 2; + } + } + else + { + for (int j = 0; j < 4; j++) + { + // hop int index = f*(n_markers*4*2) + id*(4*2) + j*2; + int index = f * (n_markers * 4 * 2) + i * (4 * 2) + j * 2; + weight_mat.at(index + 0, 0) = 0.0; + weight_mat.at(index + 1, 0) = 0.0; + } + } + } + } + // out_matrix(measurements_mat, "measurements"); + // out_matrix(parameters_mat, "parameters"); + optimization_keyframes = n_images; + optimization_markers = 0; + for (size_t i = 0; i < marker_indices.size(); ++i) + if (marker_status[i] > 0) + optimization_markers++; + Optimization optimization(n_params, n_meas); + cout << "Optimizing with " << optimization_keyframes << " keyframes and " + << optimization_markers << " markers" << endl; + optimization_error = optimization.Optimize( + parameters_mat, measurements_mat, stop, max_iter, Est, 0, method, + parameters_mask_mat, cv::Mat(), weight_mat); + optimization_error /= n_measurements; + cout << "Optimization error per corner: " << optimization_error << endl; + /* + if ((frames > 3) && (optimization_error > stop)) { + cv::Mat *err = optimization.GetErr(); + int max_k=-1; + double max=0; + for (int k=0; kheight; k++) { + if (cvmGet(err, k, 0) > max) { + max = cvmGet(err, k, 0); + max_k = k; + } + } + if (max_k >= 0) { + // We remove all 8 corner measurements + int f = (max_k - (max_k % (n_markers*4*2))) / (n_markers*4*2); + max_k -= f*(n_markers*4*2); + int id = (max_k - (max_k % (4*2))) / (4*2); + cout<<"Optimization error over the threshold -- remove the biggest + outlier: "; cout<<"frame "<(index + 0, 0); + pointcloud[pointcloud_index(id, j)].y = + parameters_mat.at(index + 1, 0); + pointcloud[pointcloud_index(id, j)].z = + parameters_mat.at(index + 2, 0); + } + } - cvReleaseMat(¶meters_mat); - cvReleaseMat(¶meters_mask_mat); - cvReleaseMat(&measurements_mat); + parameters_mat.release(); + parameters_mask_mat.release(); + measurements_mat.release(); - optimizing = false; - return true; + optimizing = false; + return true; } -void MultiMarkerBundle::_MeasurementsAdd(MarkerIterator &begin, MarkerIterator &end, const Pose& camera_pose) { - camera_poses.push_back(camera_pose); - int frame_no = camera_poses.size()-1; - //cout<<"Adding measurements for frame "<GetId(); - int index = get_id_index(id); - if (index < 0) continue; - //cout<<"Id "<marker_corners_img[j]; - //cout<at(i).marker_corners_img[j].x<<" "; - } - //cout<GetId(); + int index = get_id_index(id); + if (index < 0) + continue; + // cout<<"Id "<marker_corners_img[j]; + // cout<at(i).marker_corners_img[j].x<<" "; + } + // cout<& indices) - : MultiMarker(indices) + : MultiMarker(indices) { - pointcloud_filtered = new FilterMedian[indices.size()*4*3]; - for (size_t i=0; i= filter_buffer_max) { - marker_status[get_id_index(marker_id)]=1; - } - } - } +void MultiMarkerFiltered::PointCloudAverage(int marker_id, double edge_length, + Pose& pose) +{ + if (marker_id == 0) + { + if 
(marker_status[get_id_index(marker_id)] == 0)
+      PointCloudAdd(marker_id, edge_length, pose);
+  }
+  else
+  {
+    CvPoint3D64f corners[4];
+    PointCloudCorners3d(edge_length, pose, corners);
+    for (size_t j = 0; j < 4; ++j)
+    {
+      int id_index = get_id_index(marker_id);
+      if (id_index < 0)
+        continue;
+      int index = id_index * 4 * 3 + j * 3;
+      CvPoint3D64f p;
+      p.x = pointcloud_filtered[index + 0].next(corners[j].x);
+      p.y = pointcloud_filtered[index + 1].next(corners[j].y);
+      p.z = pointcloud_filtered[index + 2].next(corners[j].z);
+      pointcloud[pointcloud_index(marker_id, j)] = p;
+      // The marker isn't used for pose calculation until we are quite sure
+      // about it ???
+      if (pointcloud_filtered[index + 0].getCurrentSize() >= filter_buffer_max)
+      {
+        marker_status[get_id_index(marker_id)] = 1;
+      }
+    }
+  }
 }
 
-double MultiMarkerFiltered::_Update(MarkerIterator &begin, MarkerIterator &end,
+double MultiMarkerFiltered::_Update(MarkerIterator& begin, MarkerIterator& end,
                                     Camera* cam, Pose& pose, IplImage* image)
 {
-  if (_GetPose(begin, end, cam, pose, NULL) == -1) return -1;
+  if (_GetPose(begin, end, cam, pose, NULL) == -1)
+    return -1;
 
-  // For every detected marker
-  for (MarkerIterator &i = begin.reset(); i != end; ++i)
-  {
-    const Marker* marker = *i;
-    int id = marker->GetId();
-    int index = get_id_index(id);
-    if (index < 0) continue;
+  // For every detected marker
+  for (MarkerIterator& i = begin.reset(); i != end; ++i)
+  {
+    const Marker* marker = *i;
+    int id = marker->GetId();
+    int index = get_id_index(id);
+    if (index < 0)
+      continue;
 
-    // Initialize marker pose
-    if (marker_status[index] == 0)
-    {
-      double cam_posed[16];
-      double mar_posed[16];
+    // Initialize marker pose
+    if (marker_status[index] == 0)
+    {
+      double cam_posed[16];
+      double mar_posed[16];
 
-      CvMat cam_mat = cvMat(4, 4, CV_64F, cam_posed);
-      CvMat mar_mat = cvMat(4, 4, CV_64F, mar_posed);
+      cv::Mat cam_mat = cv::Mat(4, 4, CV_64F, cam_posed);
+      cv::Mat mar_mat = cv::Mat(4, 4, CV_64F, mar_posed);
 
-      pose.GetMatrix(&cam_mat);
-      marker->pose.GetMatrix(&mar_mat);
+      pose.GetMatrix(cam_mat);
+      marker->pose.GetMatrix(mar_mat);
 
-      cvInvert(&cam_mat, &cam_mat);
-      cvMatMul(&cam_mat, &mar_mat, &mar_mat);
+      cam_mat = cam_mat.inv();
+      mar_mat = cam_mat * mar_mat;
 
-      Pose p;
-      p.SetMatrix(&mar_mat);
-      PointCloudAverage(id, marker->GetMarkerEdgeLength(), p);
-    }
-  }
 
-  // When the pointcloud is updated we will get the new "better" pose...
+      Pose p;
+      p.SetMatrix(mar_mat);
+      PointCloudAverage(id, marker->GetMarkerEdgeLength(), p);
+    }
+  }
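+  // When the pointcloud is updated we will get the new "better" pose...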
+  return _GetPose(begin, end, cam, pose, image);
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/MultiMarkerInitializer.cpp b/ar_track_alvar/src/MultiMarkerInitializer.cpp
index 561ef23..ddb02c6 100644
--- a/ar_track_alvar/src/MultiMarkerInitializer.cpp
+++ b/ar_track_alvar/src/MultiMarkerInitializer.cpp
@@ -26,133 +26,169 @@
 
 using namespace std;
 
-namespace alvar {
+namespace alvar
+{
 using namespace std;
 
-MultiMarkerInitializer::MultiMarkerInitializer(std::vector<int>& indices, int _filter_buffer_min, int _filter_buffer_max)
-  : MultiMarker(indices), filter_buffer_min(_filter_buffer_min) {
+MultiMarkerInitializer::MultiMarkerInitializer(std::vector<int>& indices,
+                                               int _filter_buffer_min,
+                                               int _filter_buffer_max)
+  : MultiMarker(indices), filter_buffer_min(_filter_buffer_min)
+{
+  marker_detected.resize(indices.size());
+  pointcloud_filtered = new FilterMedian[indices.size() * 4 * 3];
+  for (size_t i = 0; i < indices.size() * 4 * 3; i++)
+  {
+    pointcloud_filtered[i].setWindowSize(_filter_buffer_max);
+  }
 
-  marker_detected.resize(indices.size());
-  pointcloud_filtered = new FilterMedian[indices.size()*4*3];
-  for (size_t i=0; i<indices.size()*4*3; i++) {
-    pointcloud_filtered[i].setWindowSize(_filter_buffer_max);
-  }
-}
-
-void MultiMarkerInitializer::MeasurementsAdd(MarkerIterator &begin, MarkerIterator &end) {
-  // copy markers into measurements.
-  vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> > new_measurements;
-  for (MarkerIterator &i = begin.reset(); i != end; ++i) {
-    const Marker* marker = *i;
-    int index = get_id_index(marker->GetId());
-    if (index == -1) continue;
-    MarkerMeasurement m;
-    m.SetId(marker->GetId());
-    m.SetMarkerSize(marker->GetMarkerEdgeLength(), marker->GetRes(), marker->GetMargin());
-    m.pose = marker->pose;
-    m.marker_corners_img = i->marker_corners_img;
-    new_measurements.push_back(m);
-    marker_detected[index] = true;
-  }
+void MultiMarkerInitializer::MeasurementsAdd(MarkerIterator& begin,
+                                             MarkerIterator& end)
+{
+  // copy markers into measurements.
+  vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> >
+      new_measurements;
+  for (MarkerIterator& i = begin.reset(); i != end; ++i)
+  {
+    const Marker* marker = *i;
+    int index = get_id_index(marker->GetId());
+    if (index == -1)
+      continue;
+    MarkerMeasurement m;
+    m.SetId(marker->GetId());
+    m.SetMarkerSize(marker->GetMarkerEdgeLength(), marker->GetRes(),
+                    marker->GetMargin());
+    m.pose = marker->pose;
+    m.marker_corners_img = i->marker_corners_img;
+    new_measurements.push_back(m);
+    marker_detected[index] = true;
+  }
 
-  // If we have seen the 0 marker the first time we push it into point cloud.
+  for (MarkerIterator& i = begin.reset(); i != end; ++i)
+  {
+    const Marker* marker = *i;
+    int id = marker->GetId();
+    int index = get_id_index(id);
+    // Initialize marker pose
+    // if (id == 0 && marker_status[index] == 0)
+    if (index == 0 && marker_status[index] == 0)
+    {
+      Pose pose;
+      cv::Point3d corners[4];
+      PointCloudCorners3d(marker->GetMarkerEdgeLength(), pose, corners);
+      for (size_t j = 0; j < 4; ++j)
+      {
+        int p_index = pointcloud_index(id, j);
+        pointcloud[p_index] = corners[j];
+      }
+      marker_status[index] = 1;
+    }
+  }

-  measurements.push_back(new_measurements);
+  measurements.push_back(new_measurements);
 }

-void MultiMarkerInitializer::MeasurementsReset() {
-  measurements.clear();
-  PointCloudReset();
-  fill(marker_status.begin(), marker_status.end(), 0);
-  fill(marker_detected.begin(), marker_detected.end(), false);
+void MultiMarkerInitializer::MeasurementsReset()
+{
+  measurements.clear();
+  PointCloudReset();
+  fill(marker_status.begin(), marker_status.end(), 0);
+  fill(marker_detected.begin(), marker_detected.end(), false);

-  for (size_t i=0; i<marker_indices.size()*4*3; i++) {
-    pointcloud_filtered[i].reset();
-  }
+  for (size_t i = 0; i < marker_indices.size() * 4 * 3; i++)
+  {
+    pointcloud_filtered[i].reset();
+  }
 }

-int MultiMarkerInitializer::Initialize(Camera* cam) {
-  for (bool found_new = true; found_new; ) {
-    found_new = false;
-    // Iterate through all measurements, try to compute Pose for each.
-    for (MeasurementIterator mi = measurements.begin(); mi != measurements.end(); ++mi) {
-      vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> > &markers = *mi;
-      Pose pose;
-      MarkerIteratorImpl<MarkerMeasurement> m_begin(markers.begin());
-      MarkerIteratorImpl<MarkerMeasurement> m_end(markers.end());
-      double err = _GetPose(m_begin, m_end, cam, pose, NULL);
-      if (err >= 0) {
-        // If pose is found, estimate marker poses for those that are still unkown.
-        found_new = updateMarkerPoses(markers, pose);
-      }
-    }
-  }
+int MultiMarkerInitializer::Initialize(Camera* cam)
+{
+  for (bool found_new = true; found_new;)
+  {
+    found_new = false;
+    // Iterate through all measurements, try to compute Pose for each.
+    for (MeasurementIterator mi = measurements.begin();
+         mi != measurements.end(); ++mi)
+    {
+      vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> >&
+          markers = *mi;
+      Pose pose;
+      MarkerIteratorImpl<MarkerMeasurement> m_begin(markers.begin());
+      MarkerIteratorImpl<MarkerMeasurement> m_end(markers.end());
+
+      double err = _GetPose(m_begin, m_end, cam, pose);
+      if (err >= 0)
+      {
+        // If pose is found, estimate marker poses for those that are still
+        // unknown.
+        found_new = updateMarkerPoses(markers, pose);
+      }
+    }
+  }

-  // Check if every marker has been seen
-  int n_detected = 0;
-  for (unsigned int i = 0; i < marker_indices.size(); ++i) {
-    cout << i << " " << marker_detected[i] << " " << marker_status[i] << "\n";
-    if (marker_detected[i] && marker_status[i] != 0) ++n_detected;
-  }
-  return n_detected;
+  // Check if every marker has been seen
+  int n_detected = 0;
+  for (unsigned int i = 0; i < marker_indices.size(); ++i)
+  {
+    cout << i << " " << marker_detected[i] << " " << marker_status[i] << "\n";
+    if (marker_detected[i] && marker_status[i] != 0)
+      ++n_detected;
+  }
+  return n_detected;
 }

-bool MultiMarkerInitializer::updateMarkerPoses(vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> > &markers, const Pose &pose) {
-  bool found_new = false;
-  for (vector<MarkerMeasurement, Eigen::aligned_allocator<MarkerMeasurement> >::iterator i = markers.begin(); i != markers.end(); ++i) {
-    MarkerMeasurement &marker = *i;
-    int id = marker.GetId();
-    int index = get_id_index(id);
-    if (index > 0 && !marker.globalPose) {
-      // Compute absolute marker pose.
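updateMarkerPoses, rewritten in the hunk that continues below, chains two homogeneous transforms: with the camera pose C and a marker's camera-frame pose M as 4x4 CV_64F matrices, the marker's pose in the bundle frame is inv(C) * M. A standalone sketch of that composition (the function name is illustrative, not part of ALVAR):

#include <opencv2/core.hpp>

// Compose a marker's bundle-frame pose from two camera-frame transforms.
// T_cam maps bundle coordinates to the camera frame; T_marker maps marker
// coordinates to the camera frame. Both are 4x4 homogeneous CV_64F matrices.
cv::Mat markerInBundleFrame(const cv::Mat& T_cam, const cv::Mat& T_marker)
{
  // inv(T_cam) takes camera coordinates back into the bundle frame, so the
  // product maps marker coordinates directly into the bundle frame.
  return T_cam.inv() * T_marker;
}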
- double cam_posed[16]; - double mar_posed[16]; - CvMat cam_mat = cvMat(4, 4, CV_64F, cam_posed); - CvMat mar_mat = cvMat(4, 4, CV_64F, mar_posed); - pose.GetMatrix(&cam_mat); - marker.pose.GetMatrix(&mar_mat); - cvInvert(&cam_mat, &cam_mat); - cvMatMul(&cam_mat, &mar_mat, &mar_mat); - marker.pose.SetMatrix(&mar_mat); - // Put marker into point cloud - cv::Point3f corners[4]; - PointCloudCorners3d(marker.GetMarkerEdgeLength(), marker.pose, corners); - for(size_t j = 0; j < 4; ++j) { - cv::Point3f p; - int p_index = pointcloud_index(id, j); - p.x = pointcloud_filtered[3*p_index+0].next(corners[j].x); - p.y = pointcloud_filtered[3*p_index+1].next(corners[j].y); - p.z = pointcloud_filtered[3*p_index+2].next(corners[j].z); - if (pointcloud_filtered[3*p_index+0].getCurrentSize() >= filter_buffer_min) { - pointcloud[p_index] = p; - marker_status[index] = 1; - } - } - marker.globalPose = 1; - found_new = true; - } - } - return found_new; +bool MultiMarkerInitializer::updateMarkerPoses( + vector >& + markers, + const Pose& pose) +{ + bool found_new = false; + for (vector >::iterator i = + markers.begin(); + i != markers.end(); ++i) + { + MarkerMeasurement& marker = *i; + int id = marker.GetId(); + int index = get_id_index(id); + if (index > 0 && !marker.globalPose) + { + // Compute absolute marker pose. + double cam_posed[16]; + double mar_posed[16]; + cv::Mat cam_mat = cv::Mat(4, 4, CV_64F, cam_posed); + cv::Mat mar_mat = cv::Mat(4, 4, CV_64F, mar_posed); + pose.GetMatrix(cam_mat); + marker.pose.GetMatrix(mar_mat); + cam_mat = cam_mat.inv(); + mar_mat = cam_mat * mar_mat; + marker.pose.SetMatrix(mar_mat); + // Put marker into point cloud + cv::Point3d corners[4]; + PointCloudCorners3d(marker.GetMarkerEdgeLength(), marker.pose, corners); + for (size_t j = 0; j < 4; ++j) + { + cv::Point3d p; + int p_index = pointcloud_index(id, j); + p.x = pointcloud_filtered[3 * p_index + 0].next(corners[j].x); + p.y = pointcloud_filtered[3 * p_index + 1].next(corners[j].y); + p.z = pointcloud_filtered[3 * p_index + 2].next(corners[j].z); + if (pointcloud_filtered[3 * p_index + 0].getCurrentSize() >= + filter_buffer_min) + { + pointcloud[p_index] = p; + marker_status[index] = 1; + } + } + marker.globalPose = 1; + found_new = true; + } + } + return found_new; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Mutex.cpp b/ar_track_alvar/src/Mutex.cpp index 26cd798..50547e7 100644 --- a/ar_track_alvar/src/Mutex.cpp +++ b/ar_track_alvar/src/Mutex.cpp @@ -25,26 +25,25 @@ #include "ar_track_alvar/Mutex_private.h" -namespace alvar { - -Mutex::Mutex() - : d(new MutexPrivate()) +namespace alvar +{ +Mutex::Mutex() : d(new MutexPrivate()) { } Mutex::~Mutex() { - delete d; + delete d; } void Mutex::lock() { - return d->lock(); + return d->lock(); } void Mutex::unlock() { - return d->unlock(); + return d->unlock(); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Mutex_unix.cpp b/ar_track_alvar/src/Mutex_unix.cpp index 33ce33e..9d92e39 100644 --- a/ar_track_alvar/src/Mutex_unix.cpp +++ b/ar_track_alvar/src/Mutex_unix.cpp @@ -25,39 +25,37 @@ #include -namespace alvar { - +namespace alvar +{ class MutexPrivateData { public: - MutexPrivateData() - : mMutex() - { - } + MutexPrivateData() : mMutex() + { + } - pthread_mutex_t mMutex; + pthread_mutex_t mMutex; }; -MutexPrivate::MutexPrivate() - : d(new MutexPrivateData()) +MutexPrivate::MutexPrivate() : d(new MutexPrivateData()) { - pthread_mutex_init(&d->mMutex, NULL); + pthread_mutex_init(&d->mMutex, NULL); } 
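alvar::Mutex only exposes lock() and unlock(), so every caller has to pair them by hand. A small RAII guard along these lines keeps the unlock exception-safe; this is a minimal sketch, not part of ALVAR, and the header path is an assumption:

#include "ar_track_alvar/Mutex.h"  // assumed install path of the Mutex header

// Scoped lock for alvar::Mutex: locks on construction, unlocks on scope exit.
class ScopedMutexLock
{
public:
  explicit ScopedMutexLock(alvar::Mutex& m) : mutex_(m) { mutex_.lock(); }
  ~ScopedMutexLock() { mutex_.unlock(); }
  ScopedMutexLock(const ScopedMutexLock&) = delete;
  ScopedMutexLock& operator=(const ScopedMutexLock&) = delete;

private:
  alvar::Mutex& mutex_;
};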
MutexPrivate::~MutexPrivate() { - pthread_mutex_destroy(&d->mMutex); - delete d; + pthread_mutex_destroy(&d->mMutex); + delete d; } void MutexPrivate::lock() { - pthread_mutex_lock(&d->mMutex); + pthread_mutex_lock(&d->mMutex); } void MutexPrivate::unlock() { - pthread_mutex_unlock(&d->mMutex); + pthread_mutex_unlock(&d->mMutex); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Mutex_win.cpp b/ar_track_alvar/src/Mutex_win.cpp index b69da4a..028d741 100644 --- a/ar_track_alvar/src/Mutex_win.cpp +++ b/ar_track_alvar/src/Mutex_win.cpp @@ -25,39 +25,37 @@ #include -namespace alvar { - +namespace alvar +{ class MutexPrivateData { public: - MutexPrivateData() - : mCriticalSection() - { - } + MutexPrivateData() : mCriticalSection() + { + } - CRITICAL_SECTION mCriticalSection; + CRITICAL_SECTION mCriticalSection; }; -MutexPrivate::MutexPrivate() - : d(new MutexPrivateData()) +MutexPrivate::MutexPrivate() : d(new MutexPrivateData()) { - InitializeCriticalSection(&d->mCriticalSection); + InitializeCriticalSection(&d->mCriticalSection); } MutexPrivate::~MutexPrivate() { - DeleteCriticalSection(&d->mCriticalSection); - delete d; + DeleteCriticalSection(&d->mCriticalSection); + delete d; } void MutexPrivate::lock() { - EnterCriticalSection(&d->mCriticalSection); + EnterCriticalSection(&d->mCriticalSection); } void MutexPrivate::unlock() { - LeaveCriticalSection(&d->mCriticalSection); + LeaveCriticalSection(&d->mCriticalSection); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Optimization.cpp b/ar_track_alvar/src/Optimization.cpp index a749524..fbbe833 100644 --- a/ar_track_alvar/src/Optimization.cpp +++ b/ar_track_alvar/src/Optimization.cpp @@ -24,287 +24,287 @@ #include "ar_track_alvar/Alvar.h" #include "ar_track_alvar/Optimization.h" #include "time.h" -#include #include -#include -#include #include using namespace std; -namespace alvar { - +namespace alvar +{ Optimization::Optimization(int n_params, int n_meas) { - estimate_param = 0; - J = cvCreateMat(n_meas, n_params, CV_64F); cvZero(J); - JtJ = cvCreateMat(n_params, n_params, CV_64F); cvZero(JtJ); - tmp = cvCreateMat(n_params, n_meas, CV_64F); cvZero(tmp); - W = cvCreateMat(n_meas, n_meas, CV_64F); cvZero(W); - diag = cvCreateMat(n_params, n_params, CV_64F); cvZero(diag); - err = cvCreateMat(n_meas, 1, CV_64F); cvZero(err); - delta = cvCreateMat(n_params, 1, CV_64F); cvZero(delta); - x_minus = cvCreateMat(n_params, 1, CV_64F); cvZero(x_minus); - x_plus = cvCreateMat(n_params, 1, CV_64F); cvZero(x_plus); - x_tmp1 = cvCreateMat(n_meas, 1, CV_64F); cvZero(x_tmp1); - x_tmp2 = cvCreateMat(n_meas, 1, CV_64F); cvZero(x_tmp2); - tmp_par = cvCreateMat(n_params, 1, CV_64F); cvZero(tmp_par); + estimate_param = 0; + J = cv::Mat::zeros(n_meas, n_params, CV_64F); + JtJ = cv::Mat::zeros(n_params, n_params, CV_64F); + tmp = cv::Mat::zeros(n_params, n_meas, CV_64F); + W = cv::Mat::zeros(n_meas, n_meas, CV_64F); + diag = cv::Mat::zeros(n_params, n_params, CV_64F); + err = cv::Mat::zeros(n_meas, 1, CV_64F); + delta = cv::Mat::zeros(n_params, 1, CV_64F); + x_minus = cv::Mat::zeros(n_params, 1, CV_64F); + x_plus = cv::Mat::zeros(n_params, 1, CV_64F); + x_tmp1 = cv::Mat::zeros(n_meas, 1, CV_64F); + x_tmp2 = cv::Mat::zeros(n_meas, 1, CV_64F); + tmp_par = cv::Mat::zeros(n_params, 1, CV_64F); } Optimization::~Optimization() { - cvReleaseMat(&J); - cvReleaseMat(&JtJ); - cvReleaseMat(&diag); - cvReleaseMat(&tmp); - cvReleaseMat(&W); - cvReleaseMat(&err); - cvReleaseMat(&delta); - cvReleaseMat(&x_plus); - 
cvReleaseMat(&x_minus);
-  cvReleaseMat(&x_tmp1);
-  cvReleaseMat(&x_tmp2);
-  cvReleaseMat(&tmp_par);
-  estimate_param = 0;
+  J.release();
+  JtJ.release();
+  diag.release();
+  tmp.release();
+  W.release();
+  err.release();
+  delta.release();
+  x_plus.release();
+  x_minus.release();
+  x_tmp1.release();
+  x_tmp2.release();
+  tmp_par.release();
+  estimate_param = 0;
 }

 double Optimization::CalcTukeyWeight(double residual, double c)
 {
-  //const double c = 3; // squared distance in the model tracker
-  double ret=0;
-
-  if(fabs(residual) <= c)
-  {
-    double tmp = 1.0-((residual/c)*(residual/c));
-    ret = ((c*c)/6.0)*(1.0-tmp*tmp*tmp);
-  }
-  else
-    ret = (c*c)/6.0;
-
-  if(residual)
-    ret = fabs(sqrt(ret)/residual);
-  else
-    ret = 1.0; // ???
-
-  return ret;
+  // const double c = 3; // squared distance in the model tracker
+  double ret = 0;
+
+  if (fabs(residual) <= c)
+  {
+    double tmp = 1.0 - ((residual / c) * (residual / c));
+    ret = ((c * c) / 6.0) * (1.0 - tmp * tmp * tmp);
+  }
+  else
+    ret = (c * c) / 6.0;
+
+  if (residual)
+    ret = fabs(sqrt(ret) / residual);
+  else
+    ret = 1.0;  // ???
+
+  return ret;
 }

 double Optimization::CalcTukeyWeightSimple(double residual, double c)
 {
-  //const double c = 3;
-  double ret=0;
-
-  double x2 = residual*residual;
-  if(x2<c*c) return residual*pow(1.0-(x2/(c*c)),2);
-  else return 0.0;
+  // const double c = 3;
+  double ret = 0;
+
+  double x2 = residual * residual;
+  if (x2 < c * c)
+    return residual * pow(1.0 - (x2 / (c * c)), 2);
+  else
+    return 0.0;
 }

-void Optimization::CalcJacobian(CvMat* x, CvMat* J, EstimateCallback Estimate)
+void Optimization::CalcJacobian(cv::Mat& x, cv::Mat& J,
+                                EstimateCallback Estimate)
 {
-  const double step = 0.001;
-
-  cvZero(J);
-  for (int i=0; i<J->cols; i++)
-  {
-    CvMat J_column;
-    cvGetCol(J, &J_column, i);
-
-    cvZero(delta);
-    cvmSet(delta, i, 0, step);
-    cvAdd(x, delta, x_plus);
-    cvmSet(delta, i, 0, -step);
-    cvAdd(x, delta, x_minus);
-
-    Estimate(x_plus, x_tmp1, estimate_param);
-    Estimate(x_minus, x_tmp2, estimate_param);
-    cvSub(x_tmp1, x_tmp2, &J_column);
-    cvScale(&J_column, &J_column, 1.0/(2*step));
-  }
+  const double step = 0.001;
+
+  J.setTo(cv::Scalar::all(0));
+  for (int i = 0; i < J.cols; i++)
+  {
+    cv::Mat J_column = J.col(i);
+
+    delta.setTo(cv::Scalar::all(0));
+    delta.at<double>(i, 0) = step;
+    x_plus = x + delta;
+    delta.at<double>(i, 0) = -step;
+    x_minus = x + delta;
+
+    Estimate(x_plus, x_tmp1, estimate_param);
+    Estimate(x_minus, x_tmp2, estimate_param);
+    // Copy into the column view so the result lands in J itself instead of
+    // re-pointing the J_column header at a freshly allocated matrix.
+    cv::Mat((x_tmp1 - x_tmp2) / (2 * step)).copyTo(J_column);
+  }
 }
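CalcJacobian above fills J column by column with central differences: column i is (f(x + h*e_i) - f(x - h*e_i)) / (2*h) with h = 0.001. The same scheme reduced to a free function, as a minimal sketch with f an arbitrary stand-in callback:

#include <functional>
#include <opencv2/core.hpp>

// Central-difference Jacobian of f at x; x and f(x) are CV_64F column vectors.
cv::Mat numericJacobian(const std::function<cv::Mat(const cv::Mat&)>& f,
                        const cv::Mat& x, double h = 1e-3)
{
  cv::Mat f0 = f(x);
  cv::Mat J(f0.rows, x.rows, CV_64F);
  for (int i = 0; i < x.rows; ++i)
  {
    cv::Mat xp = x.clone(), xm = x.clone();
    xp.at<double>(i, 0) += h;
    xm.at<double>(i, 0) -= h;
    // Column i approximates the partial derivative of f w.r.t. x_i.
    cv::Mat col = (f(xp) - f(xm)) / (2.0 * h);
    col.copyTo(J.col(i));
  }
  return J;
}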
-double Optimization::Optimize(CvMat* parameters,   // Initial values are set
-                              CvMat* measurements, // Some observations
-                              double stop,
-                              int max_iter,
-                              EstimateCallback Estimate,
-                              void *param,
-                              OptimizeMethod method,
-                              CvMat* parameters_mask, // Mask indicating non-constant parameters)
-                              CvMat* J_mat,
-                              CvMat* weights)
+double Optimization::Optimize(
+    cv::Mat& parameters,    // Initial values are set
+    cv::Mat& measurements,  // Some observations
+    double stop, int max_iter, EstimateCallback Estimate, void* param,
+    OptimizeMethod method,
+    const cv::Mat& parameters_mask,  // Mask indicating non-constant parameters)
+    const cv::Mat& J_mat, const cv::Mat& weights)
 {
-
-  int n_params = parameters->rows;
-  int n_meas = measurements->rows;
-  double error_new = 0;
-  double error_old = 0;
-  double n1, n2;
-  int cntr = 0;
-  estimate_param = param;
-  lambda = 0.001;
-
-  while(true)
-  {
-    if(!J_mat)
-      CalcJacobian(parameters, J, Estimate);
-    else
-      J = J_mat;
-
-    // Zero the columns for constant parameters
-    // TODO: Make this into a J-sized mask matrix before the iteration loop
-    if(parameters_mask)
-      for (int i=0; i<parameters_mask->rows; i++) {
-        if (cvGet2D(parameters_mask, i, 0).val[0] == 0) {
-          CvRect rect;
-          rect.height = J->rows; rect.width = 1;
-          rect.y = 0; rect.x = i;
-          CvMat foo;
-          cvGetSubRect(J, &foo, rect);
-          cvZero(&foo);
-        }
-      }
-
-    Estimate(parameters, x_tmp1, estimate_param);
-    cvSub(measurements, x_tmp1, err); // err = residual
-    error_old = cvNorm(err, 0, CV_L2);
-
-    switch(method)
-    {
-    case (GAUSSNEWTON) :
-
-      cvMulTransposed(J, JtJ, 1);
-      cvInv(JtJ, JtJ, CV_SVD);
-      cvGEMM(JtJ, J, 1.0, 0, 0, tmp, CV_GEMM_B_T); // inv(JtJ)Jt
-      cvMatMul(tmp, err, delta);
-      cvAdd(delta, parameters, parameters);
-
-      // Stopping condition
-      n1 = cvNorm(delta);
-      n2 = cvNorm(parameters);
-
-      if( ((n1/n2) < stop) ||
-          (cntr >= max_iter) )
-        goto end;
-
-      break;
-
-    case (LEVENBERGMARQUARDT) :
-
-      cvSetIdentity(diag, cvRealScalar(lambda));
-
-      if(weights)
-        for(int k = 0; k < W->rows; ++k)
-          cvmSet(W, k, k, weights->data.db[k]);
-
-      // JtWJ
-      if(weights)
-      {
-        cvGEMM(J, W, 1, 0, 0, tmp, CV_GEMM_A_T);
-        cvGEMM(tmp, J, 1, 0, 0, JtJ, 0);
-      }
-      else
-        cvMulTransposed(J, JtJ, 1);
-
-      // JtJ + lambda*I
-      // or JtWJ + lambda*I if weights are used...
-      cvAdd(JtJ, diag, JtJ);
-      cvInv(JtJ, JtJ, CV_SVD);
-      cvGEMM(JtJ, J, 1.0, 0, 0, tmp, CV_GEMM_B_T);
-
-      if(weights)
-        cvGEMM(tmp, W, 1, 0, 0, tmp, 0);
-
-      cvMatMul(tmp, err, delta);
-      cvAdd(delta, parameters, tmp_par);
-
-      Estimate(tmp_par, x_tmp1, estimate_param);
-      cvSub(measurements, x_tmp1, err);
-
-      error_new = cvNorm(err, 0, CV_L2);
-
-      if(error_new < error_old)
-      {
-        cvCopy(tmp_par, parameters);
-        lambda = lambda/10.0;
-      }
-      else
-      {
-        lambda = lambda*10.0;
-      }
-      if(lambda>10) lambda = 10;
-      if(lambda<0.00001) lambda = 0.00001;
-
-      n1 = cvNorm(delta);
-      n2 = cvNorm(parameters);
-
-      if( (n1/n2) < stop ||
-          (cntr >= max_iter) )
-      {
-        goto end;
-      }
-
-      break;
-
-    case (TUKEY_LM) :
-
-      cvSetIdentity(diag, cvRealScalar(lambda));
-
-      // Tukey weights
-      for(int k = 0; k < W->rows; ++k)
-      {
-        if(weights)                           // If using weight vector
-          if(weights->data.db[k] != -1.0)     // If true weight given
-            cvmSet(W, k, k, weights->data.db[k]); // Use given weight
-          else
-            cvmSet(W, k, k, CalcTukeyWeight(err->data.db[k], 3)); // otherwise use Tukey weight
-        else
-          cvmSet(W, k, k, CalcTukeyWeight(err->data.db[k], 3)); // Otherwise use Tukey weight
-      }
-
-      cvGEMM(J, W, 1, 0, 0, tmp, CV_GEMM_A_T);
-      cvGEMM(tmp, J, 1, 0, 0, JtJ, 0);
-      cvAdd(JtJ, diag, JtJ);
-      cvInv(JtJ, JtJ, CV_SVD);
-      cvGEMM(JtJ, J, 1.0, 0, 0, tmp, CV_GEMM_B_T);
-      cvGEMM(tmp, W, 1, 0, 0, tmp, 0);
-      cvMatMul(tmp, err, delta);
-      cvAdd(delta, parameters, tmp_par);
-
-      Estimate(tmp_par, x_tmp1, estimate_param);
-      cvSub(measurements, x_tmp1, err);
-
-      error_new = cvNorm(err, 0, CV_L2);
-
-      if(error_new < error_old)
-      {
-        cvCopy(tmp_par, parameters);
-        lambda = lambda/10.0;
-      }
-      else
-      {
-        lambda = lambda*10.0;
-      }
-      if(lambda>10) lambda = 10;
-      if(lambda<0.00001) lambda = 0.00001;
-
-      n1 = cvNorm(delta);
-      n2 = cvNorm(parameters);
-
-      if( ((n1/n2) < stop) ||
-          (cntr >= max_iter) )
-      {
-        goto end;
-      }
-
-      break;
-    }
-    ++cntr;
-  }
-
-end :
-
-  return error_old;
+  int n_params = parameters.rows;
+  int n_meas = measurements.rows;
+  double error_new = 0;
+  double error_old = 0;
+  double n1, n2;
+  int cntr = 0;
+  estimate_param = param;
+  lambda = 0.001;
+
+  while (true)
+  {
+    if (J_mat.empty())
+      CalcJacobian(parameters, J, Estimate);
+    else
+      J = J_mat;
+
+    // Zero the columns for constant parameters
+    // TODO: Make this into a J-sized mask matrix before the iteration loop
+    if (!parameters_mask.empty())
+      for (int i = 0; i < parameters_mask.rows; i++)
+      {
+        if (parameters_mask.at<double>(i, 0) == 0)
+        {
+          cv::Rect rect;
+          rect.height = J.rows;
+          rect.width = 1;
+          rect.y = 0;
+          rect.x = i;
+          cv::Mat foo = J(rect);
+          foo.setTo(cv::Scalar::all(0));
+        }
+      }
+
+    Estimate(parameters, x_tmp1, estimate_param);
+    err = measurements - x_tmp1;  // err = residual
+    error_old = cv::norm(err, cv::NORM_L2);
+
+    switch (method)
+    {
+      case (GAUSSNEWTON):
+
+        cv::mulTransposed(J, JtJ, true);
+        cv::invert(JtJ, JtJ, cv::DECOMP_SVD);
+        cv::gemm(JtJ, J, 1.0, cv::noArray(), 0, tmp,
+                 cv::GEMM_2_T);  // inv(JtJ)Jt
+
+        delta = tmp * err;
+        parameters = delta + parameters;
+
+        // Stopping condition
+        n1 = cv::norm(delta);
+        n2 = cv::norm(parameters);
+
+        if (((n1 / n2) < stop) || (cntr >= max_iter))
+          goto end;
+
+        break;
+
+      case (LEVENBERGMARQUARDT):
+        cv::setIdentity(diag, cv::Scalar(lambda));
+
+        if (!weights.empty())
+          for (int k = 0; k < W.rows; ++k)
+            W.at<double>(k, k) = weights.at<double>(k);
+
+        // JtWJ
+        if (!weights.empty())
+        {
+          cv::gemm(J, W, 1, cv::noArray(), 0, tmp, cv::GEMM_1_T);
+          cv::gemm(tmp, J, 1, cv::noArray(), 0, JtJ, 0);
+        }
+        else
+          cv::mulTransposed(J, JtJ, true);
+
+        // JtJ + lambda*I
+        // or JtWJ + lambda*I if weights are used...
+        JtJ = JtJ + diag;
+        cv::invert(JtJ, JtJ, cv::DECOMP_SVD);
+        cv::gemm(JtJ, J, 1.0, cv::noArray(), 0, tmp, cv::GEMM_2_T);
+
+        if (!weights.empty())
+          cv::gemm(tmp, W, 1, cv::noArray(), 0, tmp, 0);
+
+        delta = tmp * err;
+        tmp_par = delta + parameters;
+
+        Estimate(tmp_par, x_tmp1, estimate_param);
+        err = measurements - x_tmp1;
+
+        error_new = cv::norm(err, cv::NORM_L2);
+
+        if (error_new < error_old)
+        {
+          tmp_par.copyTo(parameters);
+          lambda = lambda / 10.0;
+        }
+        else
+        {
+          lambda = lambda * 10.0;
+        }
+        if (lambda > 10)
+          lambda = 10;
+        if (lambda < 0.00001)
+          lambda = 0.00001;
+
+        n1 = cv::norm(delta);
+        n2 = cv::norm(parameters);
+
+        if ((n1 / n2) < stop || (cntr >= max_iter))
+        {
+          goto end;
+        }
+
+        break;
+
+      case (TUKEY_LM):
+
+        cv::setIdentity(diag, cv::Scalar(lambda));
+
+        // Tukey weights
+        for (int k = 0; k < W.rows; ++k)
+        {
+          if (!weights.empty())                 // If using weight vector
+            if (weights.at<double>(k) != -1.0)  // If true weight given
+              W.at<double>(k, k) = weights.at<double>(k);  // Use given weight
+            else
+              W.at<double>(k, k) = CalcTukeyWeight(
+                  err.at<double>(k), 3);  // otherwise use Tukey weight
+          else
+            W.at<double>(k, k) = CalcTukeyWeight(
+                err.at<double>(k), 3);  // Otherwise use Tukey weight
+        }
+
+        cv::gemm(J, W, 1, cv::noArray(), 0, tmp, cv::GEMM_1_T);
+        cv::gemm(tmp, J, 1, cv::noArray(), 0, JtJ, 0);
+        JtJ = JtJ + diag;
+        cv::invert(JtJ, JtJ, cv::DECOMP_SVD);
+        cv::gemm(JtJ, J, 1.0, cv::noArray(), 0, tmp, cv::GEMM_2_T);
+        cv::gemm(tmp, W, 1, cv::noArray(), 0, tmp, 0);
+        delta = tmp * err;
+        tmp_par = delta + parameters;
+
+        Estimate(tmp_par, x_tmp1, estimate_param);
+        err = measurements - x_tmp1;
+
+        error_new = cv::norm(err, cv::NORM_L2);
+
+        if (error_new < error_old)
+        {
+          tmp_par.copyTo(parameters);
+          lambda = lambda / 10.0;
+        }
+        else
+        {
+          lambda = lambda * 10.0;
+        }
+        if (lambda > 10)
+          lambda = 10;
+        if (lambda < 0.00001)
+          lambda = 0.00001;
+
+        n1 = cv::norm(delta);
+        n2 = cv::norm(parameters);
+
+        if (((n1 / n2) < stop) || (cntr >= max_iter))
+        {
+          goto end;
+        }
+
+        break;
+    }
+    ++cntr;
+  }
+
+end:
+
+  return error_old;
 }

-} // namespace alvar
+}  // namespace alvar
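The ported Optimize keeps ALVAR's damping schedule: solve (J^T J + lambda*I) * delta = J^T r, accept the step and shrink lambda tenfold when the error drops, otherwise reject it and grow lambda tenfold, clamping lambda to [1e-5, 10]. A compact single step of that scheme for the toy model y = p0*x + p1; this is a minimal sketch with made-up data layout and names, not the class API above:

#include <algorithm>
#include <opencv2/core.hpp>

// One damped (Levenberg-Marquardt) step for the linear model y = p0*x + p1.
// xs, ys: Nx1 CV_64F samples; p: 2x1 parameters, updated in place on success.
// Returns true if the step was accepted, i.e. it reduced the L2 error.
bool lmStep(const cv::Mat& xs, const cv::Mat& ys, cv::Mat& p, double& lambda)
{
  // For a linear model the Jacobian is constant: row i is [x_i, 1].
  cv::Mat J(xs.rows, 2, CV_64F);
  for (int i = 0; i < xs.rows; ++i)
  {
    J.at<double>(i, 0) = xs.at<double>(i, 0);
    J.at<double>(i, 1) = 1.0;
  }
  cv::Mat r = ys - J * p;  // residual
  double error_old = cv::norm(r, cv::NORM_L2);

  // Damped normal equations: (JtJ + lambda*I) * delta = Jt * r.
  cv::Mat JtJ = J.t() * J + lambda * cv::Mat::eye(2, 2, CV_64F);
  cv::Mat Jtr = J.t() * r;
  cv::Mat delta;
  cv::solve(JtJ, Jtr, delta, cv::DECOMP_SVD);

  cv::Mat p_try = p + delta;
  cv::Mat r_try = ys - J * p_try;
  if (cv::norm(r_try, cv::NORM_L2) < error_old)
  {
    p = p_try;
    lambda = std::max(lambda / 10.0, 0.00001);  // good step: damp less
    return true;
  }
  lambda = std::min(lambda * 10.0, 10.0);  // bad step: damp more
  return false;
}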
diff --git a/ar_track_alvar/src/Platform.cpp b/ar_track_alvar/src/Platform.cpp
index ab8860c..61eac27 100644
--- a/ar_track_alvar/src/Platform.cpp
+++ b/ar_track_alvar/src/Platform.cpp
@@ -28,46 +28,50 @@
 #include <cstdio>
 #include <cstdarg>
 #ifdef WIN32
-  #include <windows.h>
+#include <windows.h>
 #else
-  #include <time.h>
+#include <time.h>
 #endif

-namespace alvar {
-
-void errorAtLine(int status, int error, const char *filename,
-                 unsigned int line, const char *format, ...)
+namespace alvar
+{
+void errorAtLine(int status, int error, const char* filename, unsigned int line,
+                 const char* format, ...)
{ - fflush(stdout); - if (filename) { - fprintf(stderr, "%s:%d: ", filename, line); - } - if (format) { - va_list args; - va_start(args, format); - vfprintf(stderr, format, args); - va_end(args); - } - if (error) { - fprintf(stderr, ": %s", strerror(error)); - } - fprintf(stderr, "\n"); - fflush(stderr); - if (status) { - exit(status); - } + fflush(stdout); + if (filename) + { + fprintf(stderr, "%s:%d: ", filename, line); + } + if (format) + { + va_list args; + va_start(args, format); + vfprintf(stderr, format, args); + va_end(args); + } + if (error) + { + fprintf(stderr, ": %s", strerror(error)); + } + fprintf(stderr, "\n"); + fflush(stderr); + if (status) + { + exit(status); + } } void sleep(unsigned long milliseconds) { - #ifdef WIN32 - Sleep(milliseconds); - #else - struct timespec t; - t.tv_sec = 0; - t.tv_nsec = 1000 * 1000 * milliseconds; - nanosleep(&t, NULL); - #endif +#ifdef WIN32 + Sleep(milliseconds); +#else + struct timespec t; + t.tv_sec = 0; + t.tv_nsec = 1000 * 1000 * milliseconds; + nanosleep(&t, NULL); +#endif } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Plugin.cpp b/ar_track_alvar/src/Plugin.cpp index 0758b41..93a36c2 100644 --- a/ar_track_alvar/src/Plugin.cpp +++ b/ar_track_alvar/src/Plugin.cpp @@ -25,42 +25,41 @@ #include "ar_track_alvar/Plugin_private.h" -namespace alvar { - +namespace alvar +{ Plugin::Plugin(const std::string filename) - : d(new PluginPrivate()) - , mReferenceCount(new int(1)) + : d(new PluginPrivate()), mReferenceCount(new int(1)) { - d->load(filename); + d->load(filename); } -Plugin::Plugin(const Plugin &plugin) - : d(plugin.d) - , mReferenceCount(plugin.mReferenceCount) +Plugin::Plugin(const Plugin& plugin) + : d(plugin.d), mReferenceCount(plugin.mReferenceCount) { - ++*mReferenceCount; + ++*mReferenceCount; } -Plugin &Plugin::operator=(const Plugin &plugin) +Plugin& Plugin::operator=(const Plugin& plugin) { - d = plugin.d; - mReferenceCount = plugin.mReferenceCount; - ++*mReferenceCount; - return *this; + d = plugin.d; + mReferenceCount = plugin.mReferenceCount; + ++*mReferenceCount; + return *this; } Plugin::~Plugin() { - if (!--*mReferenceCount) { - d->unload(); - delete d; - delete mReferenceCount; - } + if (!--*mReferenceCount) + { + d->unload(); + delete d; + delete mReferenceCount; + } } -void *Plugin::resolve(const char *symbol) +void* Plugin::resolve(const char* symbol) { - return d->resolve(symbol); + return d->resolve(symbol); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Plugin_unix.cpp b/ar_track_alvar/src/Plugin_unix.cpp index 931e21e..3a383a4 100644 --- a/ar_track_alvar/src/Plugin_unix.cpp +++ b/ar_track_alvar/src/Plugin_unix.cpp @@ -29,54 +29,53 @@ #include #include -namespace alvar { - +namespace alvar +{ class PluginPrivateData { public: - PluginPrivateData() - : mHandle(NULL) - { - } - - void *mHandle; + PluginPrivateData() : mHandle(NULL) + { + } + + void* mHandle; }; -PluginPrivate::PluginPrivate() - : d(new PluginPrivateData()) +PluginPrivate::PluginPrivate() : d(new PluginPrivateData()) { } PluginPrivate::~PluginPrivate() { - delete d; + delete d; } void PluginPrivate::load(const std::string filename) { - d->mHandle = dlopen(filename.data(), RTLD_LAZY); - if (!d->mHandle) { - std::stringstream message; - message << "could not load " << filename - << ", error code " << errno; - throw AlvarException(message.str().data()); - } + d->mHandle = dlopen(filename.data(), RTLD_LAZY); + if (!d->mHandle) + { + std::stringstream message; + message << "could not load " << 
filename << ", error code " << errno; + throw AlvarException(message.str().data()); + } } void PluginPrivate::unload() { - dlclose(d->mHandle); + dlclose(d->mHandle); } -void *PluginPrivate::resolve(const char *symbol) +void* PluginPrivate::resolve(const char* symbol) { - void *address = (void *)dlsym(d->mHandle, symbol); - if (!address) { - std::stringstream message; - message << "could not resolve " << symbol; - throw AlvarException(message.str().data()); - } - return address; + void* address = (void*)dlsym(d->mHandle, symbol); + if (!address) + { + std::stringstream message; + message << "could not resolve " << symbol; + throw AlvarException(message.str().data()); + } + return address; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Plugin_win.cpp b/ar_track_alvar/src/Plugin_win.cpp index d82cfd0..85083b5 100644 --- a/ar_track_alvar/src/Plugin_win.cpp +++ b/ar_track_alvar/src/Plugin_win.cpp @@ -28,54 +28,54 @@ #include #include -namespace alvar { - +namespace alvar +{ class PluginPrivateData { public: - PluginPrivateData() - : mHandle(NULL) - { - } - - HINSTANCE mHandle; + PluginPrivateData() : mHandle(NULL) + { + } + + HINSTANCE mHandle; }; -PluginPrivate::PluginPrivate() - : d(new PluginPrivateData()) +PluginPrivate::PluginPrivate() : d(new PluginPrivateData()) { } PluginPrivate::~PluginPrivate() { - delete d; + delete d; } void PluginPrivate::load(const std::string filename) { - d->mHandle = LoadLibrary(filename.data()); - if (!d->mHandle) { - std::stringstream message; - message << "could not load " << filename - << ", error code " << GetLastError(); - throw AlvarException(message.str().data()); - } + d->mHandle = LoadLibrary(filename.data()); + if (!d->mHandle) + { + std::stringstream message; + message << "could not load " << filename << ", error code " + << GetLastError(); + throw AlvarException(message.str().data()); + } } void PluginPrivate::unload() { - FreeLibrary(d->mHandle); + FreeLibrary(d->mHandle); } -void *PluginPrivate::resolve(const char *symbol) +void* PluginPrivate::resolve(const char* symbol) { - void *address = (void *)GetProcAddress(d->mHandle, symbol); - if (!address) { - std::stringstream message; - message << "could not resolve " << symbol; - throw AlvarException(message.str().data()); - } - return address; + void* address = (void*)GetProcAddress(d->mHandle, symbol); + if (!address) + { + std::stringstream message; + message << "could not resolve " << symbol; + throw AlvarException(message.str().data()); + } + return address; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Pose.cpp b/ar_track_alvar/src/Pose.cpp index 9e726db..2084acc 100644 --- a/ar_track_alvar/src/Pose.cpp +++ b/ar_track_alvar/src/Pose.cpp @@ -22,162 +22,184 @@ */ #include "ar_track_alvar/Pose.h" -#include "opencv2/core/types_c.h" using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; -void Pose::Output() const { - cout<(3, 0) = 1; } -Pose::Pose(CvMat *mat) : Rotation(mat, MAT) { - cvInitMatHeader(&translation_mat, 4, 1, CV_64F, translation); - cvZero(&translation_mat); - cvmSet(&translation_mat, 3, 0, 1); - // Fill in translation part - if (mat->cols == 4) { - cvmSet(&translation_mat, 0, 0, cvmGet(mat, 0, 3)); - cvmSet(&translation_mat, 1, 0, cvmGet(mat, 1, 3)); - cvmSet(&translation_mat, 2, 0, cvmGet(mat, 2, 3)); - } +Pose::Pose(const cv::Mat& tra, const cv::Mat& rot, RotationType t) + : Rotation(rot, t) +{ + translation_mat = cv::Mat(4, 1, CV_64F, translation); + translation_mat.setTo(cv::Scalar::all(0)); 
+ translation_mat.at(3, 0) = 1; + // Fill in translation part + translation_mat.at(0, 0) = tra.at(0, 0); + translation_mat.at(1, 0) = tra.at(1, 0); + translation_mat.at(2, 0) = tra.at(2, 0); } -Pose::Pose(const Pose& p) :Rotation(p) { - cvInitMatHeader(&translation_mat, 4, 1, CV_64F, translation); - cvCopy(&p.translation_mat, &translation_mat); +Pose::Pose(const cv::Mat& mat) : Rotation(mat, MAT) +{ + translation_mat = cv::Mat(4, 1, CV_64F, translation); + translation_mat.setTo(cv::Scalar::all(0)); + translation_mat.at(3, 0) = 1; + // Fill in translation part + if (mat.cols == 4) + { + translation_mat.at(0, 0) = mat.at(0, 3); + translation_mat.at(1, 0) = mat.at(1, 3); + translation_mat.at(2, 0) = mat.at(2, 3); + } +} + +Pose::Pose(const Pose& p) : Rotation(p) +{ + translation_mat = cv::Mat(4, 1, CV_64F, translation); + p.translation_mat.copyTo(translation_mat); } void Pose::Reset() { - cvZero(&quaternion_mat); cvmSet(&quaternion_mat, 0, 0, 1); - cvZero(&translation_mat); + quaternion_mat.setTo(cv::Scalar::all(0)); + quaternion_mat.at(0, 0) = 1; + translation_mat.setTo(cv::Scalar::all(0)); } -void Pose::SetMatrix(const CvMat *mat) +void Pose::SetMatrix(const cv::Mat& mat) { - double tmp[9]; - for(int i = 0; i < 3; ++i) - for(int j = 0; j < 3; ++j) - tmp[i*3+j] = cvmGet(mat, i, j); - - Mat9ToQuat(tmp, quaternion); - if (mat->cols == 4) { - cvmSet(&translation_mat, 0, 0, cvmGet(mat, 0, 3)); - cvmSet(&translation_mat, 1, 0, cvmGet(mat, 1, 3)); - cvmSet(&translation_mat, 2, 0, cvmGet(mat, 2, 3)); - cvmSet(&translation_mat, 3, 0, 1); - } -} - -void Pose::GetMatrix(CvMat *mat) const + double tmp[9]; + for (int i = 0; i < 3; ++i) + for (int j = 0; j < 3; ++j) + tmp[i * 3 + j] = mat.at(i, j); + + Mat9ToQuat(tmp, quaternion); + if (mat.cols == 4) + { + translation_mat.at(0, 0) = mat.at(0, 3); + translation_mat.at(1, 0) = mat.at(1, 3); + translation_mat.at(2, 0) = mat.at(2, 3); + translation_mat.at(3, 0) = 1; + } +} + +void Pose::GetMatrix(cv::Mat& mat) const { - if (mat->width == 3) { - QuatToMat9(quaternion, mat->data.db); - } else if (mat->width == 4) { - cvSetIdentity(mat); - QuatToMat16(quaternion, mat->data.db); - cvmSet(mat, 0, 3, cvmGet(&translation_mat, 0, 0)); - cvmSet(mat, 1, 3, cvmGet(&translation_mat, 1, 0)); - cvmSet(mat, 2, 3, cvmGet(&translation_mat, 2, 0)); - } + if (mat.cols == 3) + { + QuatToMat9(quaternion, mat.ptr(0)); + } + else if (mat.cols == 4) + { + cv::setIdentity(mat); + QuatToMat16(quaternion, mat.ptr(0)); + mat.at(0, 3) = translation_mat.at(0, 0); + mat.at(1, 3) = translation_mat.at(1, 0); + mat.at(2, 3) = translation_mat.at(2, 0); + } } void Pose::GetMatrixGL(double gl[16], bool mirror) { - if (mirror) Mirror(false, true, true); - CvMat gl_mat = cvMat(4, 4, CV_64F, gl); - GetMatrix(&gl_mat); - cvTranspose(&gl_mat, &gl_mat); - if (mirror) Mirror(false, true, true); + if (mirror) + Mirror(false, true, true); + cv::Mat gl_mat = cv::Mat(4, 4, CV_64F, gl); + GetMatrix(gl_mat); + gl_mat = gl_mat.t(); + if (mirror) + Mirror(false, true, true); } void Pose::SetMatrixGL(double gl[16], bool mirror) { - double gll[16]; - memcpy(gll, gl, sizeof(double)*16); - CvMat gl_mat = cvMat(4, 4, CV_64F, gll); - cvTranspose(&gl_mat, &gl_mat); - SetMatrix(&gl_mat); - if (mirror) Mirror(false, true, true); + double gll[16]; + memcpy(gll, gl, sizeof(double) * 16); + cv::Mat gl_mat = cv::Mat(4, 4, CV_64F, gll); + gl_mat = gl_mat.t(); + SetMatrix(gl_mat); + if (mirror) + Mirror(false, true, true); } void Pose::Transpose() { - double tmp[16]; - CvMat tmp_mat = cvMat(4, 4, CV_64F, tmp); - 
GetMatrix(&tmp_mat); - cvTranspose(&tmp_mat, &tmp_mat); - SetMatrix(&tmp_mat); + double tmp[16]; + cv::Mat tmp_mat = cv::Mat(4, 4, CV_64F, tmp); + GetMatrix(tmp_mat); + tmp_mat = tmp_mat.t(); + SetMatrix(tmp_mat); } void Pose::Invert() { - double tmp[16]; - CvMat tmp_mat = cvMat(4, 4, CV_64F, tmp); - GetMatrix(&tmp_mat); - cvInvert(&tmp_mat, &tmp_mat); - SetMatrix(&tmp_mat); + double tmp[16]; + cv::Mat tmp_mat = cv::Mat(4, 4, CV_64F, tmp); + GetMatrix(tmp_mat); + tmp_mat = tmp_mat.inv(); + SetMatrix(tmp_mat); } void Pose::Mirror(bool x, bool y, bool z) { - double tmp[16]; - CvMat tmp_mat = cvMat(4, 4, CV_64F, tmp); - GetMatrix(&tmp_mat); - MirrorMat(&tmp_mat, x, y, z); - SetMatrix(&tmp_mat); + double tmp[16]; + cv::Mat tmp_mat = cv::Mat(4, 4, CV_64F, tmp); + GetMatrix(tmp_mat); + MirrorMat(tmp_mat, x, y, z); + SetMatrix(tmp_mat); } -void Pose::SetTranslation(const CvMat *tra) { - cvmSet(&translation_mat, 0, 0, cvmGet(tra, 0, 0)); - cvmSet(&translation_mat, 1, 0, cvmGet(tra, 1, 0)); - cvmSet(&translation_mat, 2, 0, cvmGet(tra, 2, 0)); - cvmSet(&translation_mat, 3, 0, 1); +void Pose::SetTranslation(const cv::Mat& tra) +{ + translation_mat.at(0, 0) = tra.at(0, 0); + translation_mat.at(1, 0) = tra.at(1, 0); + translation_mat.at(2, 0) = tra.at(2, 0); + translation_mat.at(3, 0) = 1; } -void Pose::SetTranslation(const double *tra) { - translation[0] = tra[0]; - translation[1] = tra[1]; - translation[2] = tra[2]; - translation[3] = 1; +void Pose::SetTranslation(const double* tra) +{ + translation[0] = tra[0]; + translation[1] = tra[1]; + translation[2] = tra[2]; + translation[3] = 1; } -void Pose::SetTranslation(const double x, const double y, const double z) { - translation[0] = x; - translation[1] = y; - translation[2] = z; - translation[3] = 1; +void Pose::SetTranslation(const double x, const double y, const double z) +{ + translation[0] = x; + translation[1] = y; + translation[2] = z; + translation[3] = 1; } -void Pose::GetTranslation( CvMat *tra) const{ - cvmSet(tra, 0, 0, cvmGet(&translation_mat, 0, 0)); - cvmSet(tra, 1, 0, cvmGet(&translation_mat, 1, 0)); - cvmSet(tra, 2, 0, cvmGet(&translation_mat, 2, 0)); - if (tra->rows == 4) cvmSet(tra, 3, 0, 1); +void Pose::GetTranslation(cv::Mat& tra) const +{ + tra.at(0, 0) = translation_mat.at(0, 0); + tra.at(1, 0) = translation_mat.at(1, 0); + tra.at(2, 0) = translation_mat.at(2, 0); + if (tra.rows == 4) + tra.at(3, 0) = 1; } -Pose& Pose::operator = (const Pose& p) +Pose& Pose::operator=(const Pose& p) { - memcpy(quaternion, p.quaternion, 4*sizeof(double)); - memcpy(translation, p.translation, 4*sizeof(double)); - return *this; + memcpy(quaternion, p.quaternion, 4 * sizeof(double)); + memcpy(translation, p.translation, 4 * sizeof(double)); + return *this; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Ransac.cpp b/ar_track_alvar/src/Ransac.cpp index bd35e28..ac479f5 100644 --- a/ar_track_alvar/src/Ransac.cpp +++ b/ar_track_alvar/src/Ransac.cpp @@ -26,13 +26,14 @@ #include #ifdef TRACE - #include +#include #endif -namespace alvar { - -RansacImpl::RansacImpl(int min_params, int max_params, - int sizeof_param, int sizeof_model) { +namespace alvar +{ +RansacImpl::RansacImpl(int min_params, int max_params, int sizeof_param, + int sizeof_model) +{ this->min_params = min_params; this->max_params = max_params; this->sizeof_param = sizeof_param; @@ -40,112 +41,131 @@ RansacImpl::RansacImpl(int min_params, int max_params, indices = NULL; samples = new void*[max_params]; - if (!samples) { + if (!samples) + { #ifdef TRACE - 
printf("Cound not allocate memory for %d sample pointers.", - max_params); + printf("Cound not allocate memory for %d sample pointers.", max_params); #endif } hypothesis = new char[sizeof_model]; - if (!hypothesis) { + if (!hypothesis) + { #ifdef TRACE printf("Cound not allocate %d bytes of memory for model parameters.", - sizeof_model); + sizeof_model); #endif } } -RansacImpl::~RansacImpl() { - if (samples) delete[] samples; - if (hypothesis) delete[] (char *)hypothesis; - if (indices) delete[] indices; +RansacImpl::~RansacImpl() +{ + if (samples) + delete[] samples; + if (hypothesis) + delete[](char*) hypothesis; + if (indices) + delete[] indices; } -int RansacImpl::_estimate(void* params, int param_c, - int support_limit, int max_rounds, - void* model) { - if (param_c < min_params) return 0; - - int max_support = 0; - - // Randomly search for the best model. - for (int i = 0; - i < max_rounds && max_support < support_limit; - i++) { - // 1. pick a random sample of min_params. - int sample_c; - for (sample_c = 0; sample_c < min_params; sample_c++) { - int r = rand() % (param_c-sample_c); - void* s = (char*)params + r*sizeof_param; - for (int j = 0; j < sample_c; j++) - if (s >= samples[j]) s = (char*)s + sizeof_param; - samples[sample_c] = s; - } - - // 2. create a model from the sampled parameters. - _doEstimate(samples, sample_c, hypothesis); - - // 3. count the support for the model. - int hypo_support = 0; - for (int j = 0; j < param_c; j++) { - if (_doSupports((char*)params + j*sizeof_param, hypothesis)) { - hypo_support++; - } - } +int RansacImpl::_estimate(void* params, int param_c, int support_limit, + int max_rounds, void* model) +{ + if (param_c < min_params) + return 0; + + int max_support = 0; + + // Randomly search for the best model. + for (int i = 0; i < max_rounds && max_support < support_limit; i++) + { + // 1. pick a random sample of min_params. + int sample_c; + for (sample_c = 0; sample_c < min_params; sample_c++) + { + int r = rand() % (param_c - sample_c); + void* s = (char*)params + r * sizeof_param; + for (int j = 0; j < sample_c; j++) + if (s >= samples[j]) + s = (char*)s + sizeof_param; + samples[sample_c] = s; + } + + // 2. create a model from the sampled parameters. + _doEstimate(samples, sample_c, hypothesis); + + // 3. count the support for the model. + int hypo_support = 0; + for (int j = 0; j < param_c; j++) + { + if (_doSupports((char*)params + j * sizeof_param, hypothesis)) + { + hypo_support++; + } + } #ifdef TRACE - printf("Hypothesis got %d support\n", hypo_support); + printf("Hypothesis got %d support\n", hypo_support); #endif - if (hypo_support > max_support) { - max_support = hypo_support; - memcpy(model, hypothesis, sizeof_model); - } - } + if (hypo_support > max_support) + { + max_support = hypo_support; + memcpy(model, hypothesis, sizeof_model); + } + } - return max_support; + return max_support; } -int RansacImpl::_refine(void* params, int param_c, - int support_limit, int max_rounds, - void* model, char *inlier_mask) { - if (param_c < min_params) return 0; +int RansacImpl::_refine(void* params, int param_c, int support_limit, + int max_rounds, void* model, char* inlier_mask) +{ + if (param_c < min_params) + return 0; int max_support = 0; // Iteratively make the model estimation better. - for (int i = 0; - i < max_rounds && max_support < support_limit; - i++) { + for (int i = 0; i < max_rounds && max_support < support_limit; i++) + { int sample_c = 0; // 1. Find all parameters that support the current model. 
- for (int j = 0; j < param_c && sample_c < max_params; j++) { - if (_doSupports((char*)params + j*sizeof_param, model)) { - samples[sample_c++] = (char*)params + j*sizeof_param; - if (inlier_mask) inlier_mask[j] = 1; - } else { - if (inlier_mask) inlier_mask[j] = 0; + for (int j = 0; j < param_c && sample_c < max_params; j++) + { + if (_doSupports((char*)params + j * sizeof_param, model)) + { + samples[sample_c++] = (char*)params + j * sizeof_param; + if (inlier_mask) + inlier_mask[j] = 1; + } + else + { + if (inlier_mask) + inlier_mask[j] = 0; } } #ifdef TRACE printf("Found %d supporting parameters\n", sample_c); #endif - if (sample_c > max_support) { + if (sample_c > max_support) + { // 2. create a model from all supporting parameters. _doEstimate(samples, sample_c, model); max_support = sample_c; - - } else { + } + else + { // until there are no new supporting parameters. break; } } - + return max_support; } /** IndexRansac version */ -RansacImpl::RansacImpl(int min_params, int max_params, int sizeof_model) { +RansacImpl::RansacImpl(int min_params, int max_params, int sizeof_model) +{ this->min_params = min_params; this->max_params = max_params; this->sizeof_param = -1; @@ -153,101 +173,117 @@ RansacImpl::RansacImpl(int min_params, int max_params, int sizeof_model) { samples = NULL; indices = new int[max_params]; hypothesis = new char[sizeof_model]; - if (!hypothesis) { + if (!hypothesis) + { #ifdef TRACE printf("Cound not allocate %d bytes of memory for model parameters.", - sizeof_model); + sizeof_model); #endif } } -int RansacImpl::_estimate(int param_c, - int support_limit, int max_rounds, - void* model) { - if (param_c < min_params) return 0; +int RansacImpl::_estimate(int param_c, int support_limit, int max_rounds, + void* model) +{ + if (param_c < min_params) + return 0; int max_support = 0; // Randomly search for the best model. - for (int i = 0; - i < max_rounds && max_support < support_limit; - i++) { - // 1. pick a random sample of min_params. - int sample_c; - for (sample_c = 0; sample_c < min_params; sample_c++) { - int r = rand() % (param_c-sample_c); - for (int j = 0; j < sample_c; j++) - if (r >= indices[j]) r++; - indices[sample_c] = r; - } + for (int i = 0; i < max_rounds && max_support < support_limit; i++) + { + // 1. pick a random sample of min_params. + int sample_c; + for (sample_c = 0; sample_c < min_params; sample_c++) + { + int r = rand() % (param_c - sample_c); + for (int j = 0; j < sample_c; j++) + if (r >= indices[j]) + r++; + indices[sample_c] = r; + } - // 2. create a model from the sampled parameters. - _doEstimate(indices, sample_c, hypothesis); + // 2. create a model from the sampled parameters. + _doEstimate(indices, sample_c, hypothesis); - // 3. count the support for the model. - int hypo_support = 0; - for (int j = 0; j < param_c; j++) { - if (_doSupports(j, hypothesis)) { - hypo_support++; - } + // 3. count the support for the model. 
+ int hypo_support = 0; + for (int j = 0; j < param_c; j++) + { + if (_doSupports(j, hypothesis)) + { + hypo_support++; } + } #ifdef TRACE - printf("Hypothesis got %d support\n", hypo_support); + printf("Hypothesis got %d support\n", hypo_support); #endif - if (hypo_support > max_support) { - max_support = hypo_support; - memcpy(model, hypothesis, sizeof_model); - } + if (hypo_support > max_support) + { + max_support = hypo_support; + memcpy(model, hypothesis, sizeof_model); + } } return max_support; } -int RansacImpl::_refine(int param_c, - int support_limit, int max_rounds, - void* model, char *inlier_mask) { - if (param_c < min_params) return 0; +int RansacImpl::_refine(int param_c, int support_limit, int max_rounds, + void* model, char* inlier_mask) +{ + if (param_c < min_params) + return 0; int max_support = 0; // Iteratively make the model estimation better. - for (int i = 0; - i < max_rounds && max_support < support_limit; - i++) { + for (int i = 0; i < max_rounds && max_support < support_limit; i++) + { int sample_c = 0; // 1. Find all parameters that support the current model. - for (int j = 0; j < param_c && sample_c < max_params; j++) { - if (_doSupports(j, model)) { - indices[sample_c++] = j; - if (inlier_mask) inlier_mask[j] = 1; - } else { - if (inlier_mask) inlier_mask[j] = 0; + for (int j = 0; j < param_c && sample_c < max_params; j++) + { + if (_doSupports(j, model)) + { + indices[sample_c++] = j; + if (inlier_mask) + inlier_mask[j] = 1; + } + else + { + if (inlier_mask) + inlier_mask[j] = 0; } } #ifdef TRACE printf("Found %d supporting parameters\n", sample_c); #endif - if (sample_c < min_params) break; // indicates too few points. - if (sample_c > max_support) { + if (sample_c < min_params) + break; // indicates too few points. + if (sample_c > max_support) + { // 2. create a model from all supporting parameters. _doEstimate(indices, sample_c, model); max_support = sample_c; - - } else { + } + else + { // until there are no new supporting parameters. 
break; } } - + return max_support; } /** public methods */ int RansacImpl::estimateRequiredRounds(float success_propability, - float inlier_percentage) { - return (int) - (log(1-success_propability) / log(1-pow(inlier_percentage,3))); + float inlier_percentage) +{ + return (int)(log(1 - success_propability) / + log(1 - pow(inlier_percentage, 3))); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Rotation.cpp b/ar_track_alvar/src/Rotation.cpp index 583305c..3e89b4e 100644 --- a/ar_track_alvar/src/Rotation.cpp +++ b/ar_track_alvar/src/Rotation.cpp @@ -23,363 +23,375 @@ #include "ar_track_alvar/Alvar.h" #include "ar_track_alvar/Rotation.h" +#include using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; Rotation::Rotation() { - cvInitMatHeader(&quaternion_mat, 4, 1, CV_64F, quaternion); - Reset(); + quaternion_mat = cv::Mat(4, 1, CV_64F, quaternion); + Reset(); } -Rotation::Rotation(const Rotation& r) { - cvInitMatHeader(&quaternion_mat, 4, 1, CV_64F, quaternion); - cvCopy(&r.quaternion_mat, &quaternion_mat); +Rotation::Rotation(const Rotation& r) +{ + quaternion_mat = cv::Mat(4, 1, CV_64F); + r.quaternion_mat.copyTo(quaternion_mat); } -Rotation::Rotation(CvMat *data, RotationType t) +Rotation::Rotation(const cv::Mat& data, RotationType t) { - cvInitMatHeader(&quaternion_mat, 4, 1, CV_64F, quaternion); - - Reset(); - - switch (t) - { - case QUAT : - SetQuaternion(data); - break; - case MAT : - SetMatrix(data); - break; - case EUL : - SetEuler(data); - break; - case ROD : - SetRodriques(data); - break; - } + quaternion_mat = cv::Mat(4, 1, CV_64F, quaternion); + + Reset(); + + switch (t) + { + case QUAT: + SetQuaternion(data); + break; + case MAT: + SetMatrix(data); + break; + case EUL: + SetEuler(data); + break; + case ROD: + SetRodriques(data); + break; + } } void Rotation::Transpose() { - double tmp[9]; - CvMat tmp_mat = cvMat(3, 3, CV_64F, tmp); - GetMatrix(&tmp_mat); - cvTranspose(&tmp_mat, &tmp_mat); - SetMatrix(&tmp_mat); + double tmp[9]; + cv::Mat tmp_mat = cv::Mat(3, 3, CV_64F, tmp); + GetMatrix(tmp_mat); + cv::transpose(tmp_mat, tmp_mat); + SetMatrix(tmp_mat); } - -void Rotation::MirrorMat(CvMat *mat, bool x, bool y, bool z) { - CvMat *mat_mul = cvCloneMat(mat); - cvSetIdentity(mat_mul); - if (x) cvmSet(mat_mul, 0, 0, -1); - if (y) cvmSet(mat_mul, 1, 1, -1); - if (z) cvmSet(mat_mul, 2, 2, -1); - cvMatMul(mat_mul, mat, mat); - cvReleaseMat(&mat_mul); + +void Rotation::MirrorMat(cv::Mat& mat, bool x, bool y, bool z) +{ + cv::Mat mat_mul = mat.clone(); + cv::setIdentity(mat_mul); + if (x) + mat_mul.at(0, 0) = -1; + if (y) + mat_mul.at(1, 1) = -1; + if (z) + mat_mul.at(2, 2) = -1; + mat = mat_mul * mat; + mat_mul.release(); } - + void Rotation::Mirror(bool x, bool y, bool z) { - double tmp[9]; - CvMat tmp_mat = cvMat(3, 3, CV_64F, tmp); - GetMatrix(&tmp_mat); - MirrorMat(&tmp_mat, x, y, z); - SetMatrix(&tmp_mat); + double tmp[9]; + cv::Mat tmp_mat = cv::Mat(3, 3, CV_64F, tmp); + GetMatrix(tmp_mat); + MirrorMat(tmp_mat, x, y, z); + SetMatrix(tmp_mat); } void Rotation::Reset() { - cvZero(&quaternion_mat); cvmSet(&quaternion_mat, 0, 0, 1); + quaternion_mat.setTo(cv::Scalar::all(0)); + quaternion_mat.at(0, 0) = 1; } -void Rotation::Mat9ToRod(double *mat, double *rod) +void Rotation::Mat9ToRod(double* mat, double* rod) { - CvMat mat_m, rod_m; - cvInitMatHeader(&mat_m, 3, 3, CV_64F, mat); - cvInitMatHeader(&rod_m, 3, 1, CV_64F, rod); - cv::Rodrigues(cv::cvarrToMat(&mat_m), cv::cvarrToMat(&rod_m)); + cv::Mat mat_m, rod_m; + mat_m = 
cv::Mat(3, 3, CV_64F, mat); + rod_m = cv::Mat(3, 1, CV_64F, rod); + cv::Rodrigues(mat_m, rod_m); } -void Rotation::RodToMat9(double *rod, double *mat) +void Rotation::RodToMat9(const double* rod, double* mat) { - CvMat mat_m, rod_m; - cvInitMatHeader(&mat_m, 3, 3, CV_64F, mat); - cvInitMatHeader(&rod_m, 3, 1, CV_64F, rod); - cv::Rodrigues(cv::cvarrToMat(&rod_m), cv::cvarrToMat(&mat_m)); + cv::Mat mat_m, rod_m; + mat_m = cv::Mat(3, 3, CV_64F, mat); + rod_m = cv::Mat(3, 1, CV_64F, const_cast(rod)); + cv::Rodrigues(rod_m, mat_m); } -void Rotation::QuatInv(const double *q, double *qi) +void Rotation::QuatInv(const double* q, double* qi) { - qi[0] = q[0]; - qi[1] = -1*q[1]; - qi[2] = -1*q[2]; - qi[3] = -1*q[3]; + qi[0] = q[0]; + qi[1] = -1 * q[1]; + qi[2] = -1 * q[2]; + qi[3] = -1 * q[3]; } -void Rotation::QuatNorm(double *q) +void Rotation::QuatNorm(double* q) { - double l = sqrt(q[0]*q[0] + q[1]*q[1] + q[2]*q[2] + q[3]*q[3] ); - - if(l != 0) - for(unsigned i = 0; i < 4; ++i) - q[i] = q[i] / l; + double l = sqrt(q[0] * q[0] + q[1] * q[1] + q[2] * q[2] + q[3] * q[3]); + + if (l != 0) + for (unsigned i = 0; i < 4; ++i) + q[i] = q[i] / l; } -void Rotation::QuatMul(const double *q1, const double *q2, double *q3) +void Rotation::QuatMul(const double* q1, const double* q2, double* q3) { - double w1 = q1[0]; - double x1 = q1[1]; - double y1 = q1[2]; - double z1 = q1[3]; - - double w2 = q2[0]; - double x2 = q2[1]; - double y2 = q2[2]; - double z2 = q2[3]; - - q3[0] = w1*w2 - x1*x2 - y1*y2 - z1*z2; - q3[1] = w1*x2 + x1*w2 + y1*z2 - z1*y2; - q3[2] = w1*y2 + y1*w2 + z1*x2 - x1*z2; - q3[3] = w1*z2 + z1*w2 + x1*y2 - y1*x2; - - QuatNorm(q3); + double w1 = q1[0]; + double x1 = q1[1]; + double y1 = q1[2]; + double z1 = q1[3]; + + double w2 = q2[0]; + double x2 = q2[1]; + double y2 = q2[2]; + double z2 = q2[3]; + + q3[0] = w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2; + q3[1] = w1 * x2 + x1 * w2 + y1 * z2 - z1 * y2; + q3[2] = w1 * y2 + y1 * w2 + z1 * x2 - x1 * z2; + q3[3] = w1 * z2 + z1 * w2 + x1 * y2 - y1 * x2; + + QuatNorm(q3); } -void Rotation::QuatToMat9(const double *quat, double *mat) +void Rotation::QuatToMat9(const double* quat, double* mat) { - double W = quat[0]; - double X = quat[1]; - double Y = quat[2]; - double Z = quat[3]; - - double xx = X * X; - double xy = X * Y; - double xz = X * Z; - double xw = X * W; - - double yy = Y * Y; - double yz = Y * Z; - double yw = Y * W; - - double zz = Z * Z; - double zw = Z * W; - - mat[0] = 1 - 2 * ( yy + zz ); //(0,0) - mat[1] = 2 * ( xy - zw ); //(0,1) - mat[2] = 2 * ( xz + yw ); //(0,2) - - mat[3] = 2 * ( xy + zw ); //(1,0) - mat[4] = 1 - 2 * ( xx + zz ); //(1,1) - mat[5] = 2 * ( yz - xw ); //(1,2) - - mat[6] = 2 * ( xz - yw ); //(2,0) - mat[7] = 2 * ( yz + xw ); //(2,1) - mat[8] = 1 - 2 * ( xx + yy ); //(2,2) + double W = quat[0]; + double X = quat[1]; + double Y = quat[2]; + double Z = quat[3]; + + double xx = X * X; + double xy = X * Y; + double xz = X * Z; + double xw = X * W; + + double yy = Y * Y; + double yz = Y * Z; + double yw = Y * W; + + double zz = Z * Z; + double zw = Z * W; + + mat[0] = 1 - 2 * (yy + zz); //(0,0) + mat[1] = 2 * (xy - zw); //(0,1) + mat[2] = 2 * (xz + yw); //(0,2) + + mat[3] = 2 * (xy + zw); //(1,0) + mat[4] = 1 - 2 * (xx + zz); //(1,1) + mat[5] = 2 * (yz - xw); //(1,2) + + mat[6] = 2 * (xz - yw); //(2,0) + mat[7] = 2 * (yz + xw); //(2,1) + mat[8] = 1 - 2 * (xx + yy); //(2,2) } -// TODO: Now we don't want to eliminate the translation part from the matrix here. Did this change break something??? 
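QuatToMat9 above expands a unit quaternion (w, x, y, z) into a row-major 3x3 rotation. As a sanity check, a quarter turn about Z, q = (sqrt(2)/2, 0, 0, sqrt(2)/2), should come out as the matrix [0 -1 0; 1 0 0; 0 0 1]. The minimal sketch below repeats the same expansion inline rather than calling the class, so it stands alone:

#include <cstdio>

int main()
{
  // 90 degrees about Z as a unit quaternion (w, x, y, z).
  const double W = 0.70710678, X = 0.0, Y = 0.0, Z = 0.70710678;
  // Same expansion as Rotation::QuatToMat9 (row-major 3x3).
  const double R[9] = {
    1 - 2 * (Y * Y + Z * Z), 2 * (X * Y - Z * W),     2 * (X * Z + Y * W),
    2 * (X * Y + Z * W),     1 - 2 * (X * X + Z * Z), 2 * (Y * Z - X * W),
    2 * (X * Z - Y * W),     2 * (Y * Z + X * W),     1 - 2 * (X * X + Y * Y)
  };
  for (int r = 0; r < 3; ++r)
    std::printf("% .3f % .3f % .3f\n", R[3 * r], R[3 * r + 1], R[3 * r + 2]);
  // Prints approximately: 0 -1 0 / 1 0 0 / 0 0 1, a quarter turn about Z.
  return 0;
}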
-void Rotation::QuatToMat16(const double *quat, double *mat) +// TODO: Now we don't want to eliminate the translation part from the matrix +// here. Did this change break something??? +void Rotation::QuatToMat16(const double* quat, double* mat) { - //memset(mat, 0, 16*sizeof(double)); + // memset(mat, 0, 16*sizeof(double)); - double W = quat[0]; - double X = quat[1]; - double Y = quat[2]; - double Z = quat[3]; + double W = quat[0]; + double X = quat[1]; + double Y = quat[2]; + double Z = quat[3]; - double xx = X * X; - double xy = X * Y; - double xz = X * Z; - double xw = X * W; + double xx = X * X; + double xy = X * Y; + double xz = X * Z; + double xw = X * W; - double yy = Y * Y; - double yz = Y * Z; - double yw = Y * W; + double yy = Y * Y; + double yz = Y * Z; + double yw = Y * W; - double zz = Z * Z; - double zw = Z * W; + double zz = Z * Z; + double zw = Z * W; - mat[0] = 1 - 2 * ( yy + zz ); //(0,0) - mat[1] = 2 * ( xy - zw ); //(0,1) - mat[2] = 2 * ( xz + yw ); //(0,2) + mat[0] = 1 - 2 * (yy + zz); //(0,0) + mat[1] = 2 * (xy - zw); //(0,1) + mat[2] = 2 * (xz + yw); //(0,2) - mat[4] = 2 * ( xy + zw ); //(1,0) - mat[5] = 1 - 2 * ( xx + zz ); //(1,1) - mat[6] = 2 * ( yz - xw ); //(1,2) + mat[4] = 2 * (xy + zw); //(1,0) + mat[5] = 1 - 2 * (xx + zz); //(1,1) + mat[6] = 2 * (yz - xw); //(1,2) - mat[8] = 2 * ( xz - yw ); //(2,0) - mat[9] = 2 * ( yz + xw ); //(2,1) - mat[10] = 1 - 2 * ( xx + yy ); //(2,2) + mat[8] = 2 * (xz - yw); //(2,0) + mat[9] = 2 * (yz + xw); //(2,1) + mat[10] = 1 - 2 * (xx + yy); //(2,2) - //mat[15] = 1; + // mat[15] = 1; } -void Rotation::QuatToEul(const double *q, double *eul) +void Rotation::QuatToEul(const double* q, double* eul) { - double qw = q[0]; - double qx = q[1]; - double qy = q[2]; - double qz = q[3]; - - double heading = 0, bank = 0, attitude = 0; - - if ((2*qx*qy + 2*qz*qw) == 1.0) - { - heading = 2 * atan2(qx,qw); - bank = 0; - } - else if ((2*qx*qy + 2*qz*qw) == -1.0) - { - heading = -2 * atan2(qx,qw); - bank = 0; - } - else - { - heading = atan2(2*qy*qw-2*qx*qz , 1 - 2*qy*qy - 2*qz*qz); - bank = atan2(2*qx*qw-2*qy*qz , 1 - 2*qx*qx - 2*qz*qz); - } - - attitude = asin(2*qx*qy + 2*qz*qw); - - heading = 180.0 * heading / PI; - attitude = 180.0 * attitude / PI; - bank = 180.0 * bank / PI; - - eul[0] = heading; - eul[1] = attitude; - eul[2] = bank; + double qw = q[0]; + double qx = q[1]; + double qy = q[2]; + double qz = q[3]; + + double heading = 0, bank = 0, attitude = 0; + + if ((2 * qx * qy + 2 * qz * qw) == 1.0) + { + heading = 2 * atan2(qx, qw); + bank = 0; + } + else if ((2 * qx * qy + 2 * qz * qw) == -1.0) + { + heading = -2 * atan2(qx, qw); + bank = 0; + } + else + { + heading = atan2(2 * qy * qw - 2 * qx * qz, 1 - 2 * qy * qy - 2 * qz * qz); + bank = atan2(2 * qx * qw - 2 * qy * qz, 1 - 2 * qx * qx - 2 * qz * qz); + } + + attitude = asin(2 * qx * qy + 2 * qz * qw); + + heading = 180.0 * heading / PI; + attitude = 180.0 * attitude / PI; + bank = 180.0 * bank / PI; + + eul[0] = heading; + eul[1] = attitude; + eul[2] = bank; } -void Rotation::Mat9ToQuat(const double *mat, double *quat) +void Rotation::Mat9ToQuat(const double* mat, double* quat) { - quat[0] = sqrt(max(0., 1 + mat[0] + mat[4] + mat[8])) / 2.0; // w - quat[1] = sqrt(max(0., 1 + mat[0] - mat[4] - mat[8])) / 2.0; // x - quat[2] = sqrt(max(0., 1 - mat[0] + mat[4] - mat[8])) / 2.0; // y - quat[3] = sqrt(max(0., 1 - mat[0] - mat[4] + mat[8])) / 2.0; // z + quat[0] = sqrt(max(0., 1 + mat[0] + mat[4] + mat[8])) / 2.0; // w + quat[1] = sqrt(max(0., 1 + mat[0] - mat[4] - mat[8])) / 2.0; // x + 
quat[2] = sqrt(max(0., 1 - mat[0] + mat[4] - mat[8])) / 2.0; // y + quat[3] = sqrt(max(0., 1 - mat[0] - mat[4] + mat[8])) / 2.0; // z - quat[1] = quat[1]*Sign(mat[7] - mat[5]); // x - quat[2] = quat[2]*Sign(mat[2] - mat[6]); // y - quat[3] = quat[3]*Sign(mat[3] - mat[1]); // z + quat[1] = quat[1] * Sign(mat[7] - mat[5]); // x + quat[2] = quat[2] * Sign(mat[2] - mat[6]); // y + quat[3] = quat[3] * Sign(mat[3] - mat[1]); // z - QuatNorm(quat); + QuatNorm(quat); } -void Rotation::EulToQuat(const double *eul, double *quat) +void Rotation::EulToQuat(const double* eul, double* quat) { - double heading = PI*eul[0]/180.0; - double attitude = PI*eul[1]/180.0; - double bank = PI*eul[2]/180.0; - - double c1 = cos(heading/2.0); - double s1 = sin(heading/2.0); - double c2 = cos(attitude/2.0); - double s2 = sin(attitude/2.0); - double c3 = cos(bank/2.0); - double s3 = sin(bank/2.0); - double c1c2 = c1*c2; - double s1s2 = s1*s2; - - quat[0] = c1c2*c3 - s1s2*s3; - quat[1] = c1c2*s3 + s1s2*c3; - quat[2] = s1*c2*c3 + c1*s2*s3; - quat[3] = c1*s2*c3 - s1*c2*s3; - - QuatNorm(quat); + double heading = PI * eul[0] / 180.0; + double attitude = PI * eul[1] / 180.0; + double bank = PI * eul[2] / 180.0; + + double c1 = cos(heading / 2.0); + double s1 = sin(heading / 2.0); + double c2 = cos(attitude / 2.0); + double s2 = sin(attitude / 2.0); + double c3 = cos(bank / 2.0); + double s3 = sin(bank / 2.0); + double c1c2 = c1 * c2; + double s1s2 = s1 * s2; + + quat[0] = c1c2 * c3 - s1s2 * s3; + quat[1] = c1c2 * s3 + s1s2 * c3; + quat[2] = s1 * c2 * c3 + c1 * s2 * s3; + quat[3] = c1 * s2 * c3 - s1 * c2 * s3; + + QuatNorm(quat); } -void Rotation::SetQuaternion(CvMat *mat) +void Rotation::SetQuaternion(const cv::Mat& mat) { - cvCopy(mat, &quaternion_mat); - QuatNorm(quaternion); + mat.copyTo(quaternion_mat); + QuatNorm(quaternion); } -void Rotation::SetQuaternion(const double *quat) +void Rotation::SetQuaternion(const double* quat) { - quaternion[0] = quat[0]; - quaternion[1] = quat[1]; - quaternion[2] = quat[2]; - quaternion[3] = quat[3]; - QuatNorm(quaternion); + quaternion[0] = quat[0]; + quaternion[1] = quat[1]; + quaternion[2] = quat[2]; + quaternion[3] = quat[3]; + QuatNorm(quaternion); } -void Rotation::SetEuler(const CvMat *mat) +void Rotation::SetEuler(const cv::Mat& mat) { - EulToQuat(mat->data.db, quaternion); + EulToQuat(mat.ptr(0), quaternion); } -void Rotation::SetRodriques(const CvMat *mat) +void Rotation::SetRodriques(const cv::Mat& mat) { - double tmp[9]; - RodToMat9(mat->data.db, tmp); - Mat9ToQuat(tmp, quaternion); + double tmp[9]; + RodToMat9(mat.ptr(0), tmp); + Mat9ToQuat(tmp, quaternion); } -void Rotation::SetMatrix(const CvMat *mat) +void Rotation::SetMatrix(const cv::Mat& mat) { - double tmp[9]; - for(int i = 0; i < 3; ++i) - for(int j = 0; j < 3; ++j) - tmp[i*3+j] = cvmGet(mat, i, j); + double tmp[9]; + for (int i = 0; i < 3; ++i) + for (int j = 0; j < 3; ++j) + tmp[i * 3 + j] = mat.at(i, j); - Mat9ToQuat(tmp, quaternion); + Mat9ToQuat(tmp, quaternion); } -void Rotation::GetMatrix(CvMat *mat) const +void Rotation::GetMatrix(cv::Mat& mat) const { - if (mat->width == 3) { - QuatToMat9(quaternion, mat->data.db); - } else if (mat->width == 4) { - cvSetIdentity(mat); - QuatToMat16(quaternion, mat->data.db); - } + if (mat.cols == 3) + { + QuatToMat9(quaternion, mat.ptr(0)); + } + else if (mat.cols == 4) + { + cv::setIdentity(mat); + QuatToMat16(quaternion, mat.ptr(0)); + } } -void Rotation::GetRodriques(CvMat *mat) const +void Rotation::GetRodriques(cv::Mat& mat) const { - double tmp[9]; - 
-    QuatToMat9(quaternion, tmp);
-    Mat9ToRod(tmp, mat->data.db);
+  double tmp[9];
+  QuatToMat9(quaternion, tmp);
+  Mat9ToRod(tmp, mat.ptr<double>(0));
 }

-void Rotation::GetEuler(CvMat *mat) const
+void Rotation::GetEuler(cv::Mat& mat) const
 {
-    QuatToEul(quaternion, mat->data.db);
+  QuatToEul(quaternion, mat.ptr<double>(0));
 }

-void Rotation::GetQuaternion(CvMat *mat) const
+void Rotation::GetQuaternion(cv::Mat& mat) const
 {
-    cvCopy(&quaternion_mat, mat);
+  quaternion_mat.copyTo(mat);
 }

 // TODO: This is not needed???
-inline Rotation& Rotation::operator = (const Rotation& r)
+inline Rotation& Rotation::operator=(const Rotation& r)
 {
-    memcpy(quaternion, r.quaternion, 4*sizeof(double));
-    return *this;
+  memcpy(quaternion, r.quaternion, 4 * sizeof(double));
+  return *this;
 }

-inline Rotation& Rotation::operator += (const Rotation& r)
+inline Rotation& Rotation::operator+=(const Rotation& r)
 {
-    Rotation res;
-    QuatMul(quaternion, r.quaternion, res.quaternion);
-    memcpy(quaternion, res.quaternion, 4*sizeof(double));
-    //x += v.x;
-    //y += v.y;
-    //z += v.z;
-
-    return *this;
+  Rotation res;
+  QuatMul(quaternion, r.quaternion, res.quaternion);
+  memcpy(quaternion, res.quaternion, 4 * sizeof(double));
+  // x += v.x;
+  // y += v.y;
+  // z += v.z;
+
+  return *this;
 }

-inline Rotation operator + (const Rotation& r1, const Rotation& r2)
+inline Rotation operator+(const Rotation& r1, const Rotation& r2)
 {
-    Rotation ret = r1;
-    ret += r2;
+  Rotation ret = r1;
+  ret += r2;

-    return ret;
+  return ret;
 }

-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/SampleCamCalib.cpp b/ar_track_alvar/src/SampleCamCalib.cpp
index 5b76e11..d76f0f3 100644
--- a/ar_track_alvar/src/SampleCamCalib.cpp
+++ b/ar_track_alvar/src/SampleCamCalib.cpp
@@ -5,182 +5,230 @@ using namespace alvar;
 using namespace std;

-const int calib_count_max=50;
-const int etalon_rows=6;
-const int etalon_columns=8;
+const int calib_count_max = 50;
+const int etalon_rows = 6;
+const int etalon_columns = 8;
 std::stringstream calibrationFilename;

-void videocallback(IplImage *image)
+void videocallback(IplImage* image)
 {
-    static bool calibrated = false;
-    static int calib_count=0;
-    static Camera cam;
-    static ProjPoints pp;
-    static int64 prev_tick=0;
-    static bool initialized = false;
-
-    if (!initialized) {
-        cam.SetRes(image->width, image->height);
-        prev_tick = cvGetTickCount();
-        initialized = true;
+  static bool calibrated = false;
+  static int calib_count = 0;
+  static Camera cam;
+  static ProjPoints pp;
+  static int64 prev_tick = 0;
+  static bool initialized = false;
+
+  if (!initialized)
+  {
+    cam.SetRes(image->width, image->height);
+    prev_tick = cvGetTickCount();
+    initialized = true;
+  }
+
+  bool flip_image = (image->origin ? true : false);
+  if (flip_image)
+  {
+    cvFlip(image);
+    image->origin = !image->origin;
+  }
+
+  assert(image);
+  if (!calibrated)
+  {
+    // If we have already collected enough data to make the calibration
+    // - We are ready to end the capture loop
+    // - Calibrate
+    // - Save the calibration file
+    if (calib_count >= calib_count_max)
+    {
+      std::cout << "Calibrating..." << endl;
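+      // Enough chessboard views collected: run the calibration, write the
+      // camera parameters to the per-device XML file, and stop collecting.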
+      calib_count = 0;
+      cam.Calibrate(pp);
+      pp.Reset();
+      cam.SaveCalib(calibrationFilename.str().c_str());
+      std::cout << "Saving calibration: " << calibrationFilename.str() << endl;
+      calibrated = true;
     }
-    
-    bool flip_image = (image->origin?true:false);
-    if (flip_image) {
-        cvFlip(image);
-        image->origin = !image->origin;
-    }
-    
-    assert(image);
-    if (!calibrated) {
-        // If we have already collected enough data to make the calibration
-        // - We are ready to end the capture loop
-        // - Calibrate
-        // - Save the calibration file
-        if (calib_count >= calib_count_max) {
-            std::cout<<"Calibrating..."<<endl;
-            calib_count = 0;
-            cam.Calibrate(pp);
-            pp.Reset();
-            cam.SaveCalib(calibrationFilename.str().c_str());
-            std::cout<<"Saving calibration: "<<calibrationFilename.str()<<endl;
-            calibrated = true;
-        }
-        // If we are still collecting calibration data
-        // - For every 1.5s add calibration data from detected 7*9 chessboard (and visualize it if true)
-        else {
-            int64 tick = cvGetTickCount();
-            if (double(tick - prev_tick) > (cvGetTickFrequency() * 1000 * 1000 * 1.5)) {
-                if (pp.AddPointsUsingChessboard(image, 2.8, etalon_rows, etalon_columns, true)) {
-                    prev_tick = tick;
-                    calib_count++;
-                    cout<<calib_count<<"/"<<calib_count_max<<endl;
-                }
-            }
-        }
-    }
+    // If we are still collecting calibration data
+    // - For every 1.5s add calibration data from detected 7*9 chessboard
+    //   (and visualize it if true)
+    else
+    {
+      int64 tick = cvGetTickCount();
+      if (double(tick - prev_tick) > (cvGetTickFrequency() * 1000 * 1000 * 1.5))
+    {
+      if (pp.AddPointsUsingChessboard(image, 2.8, etalon_rows, etalon_columns,
+                                      true))
+      {
+        prev_tick = tick;
+        calib_count++;
+        cout << calib_count << "/" << calib_count_max << endl;
+      }
+    }
  }
-    
-    if (flip_image) {
-        cvFlip(image);
-        image->origin = !image->origin;
+  else
+  {
+    if (pp.AddPointsUsingChessboard(image, 2.5, etalon_rows, etalon_columns,
+                                    true))
+    {
+      Pose pose;
+      cam.CalcExteriorOrientation(pp.object_points, pp.image_points, &pose);
+      cam.ProjectPoints(pp.object_points, &pose, pp.image_points);
+      for (size_t i = 0; i < pp.image_points.size(); i++)
+      {
+        cvCircle(image,
+                 cvPoint((int)pp.image_points[i].x, (int)pp.image_points[i].y),
+                 6, CV_RGB(0, 0, 255));
+      }
+      pp.Reset();
     }
+  }
+
+  if (flip_image)
+  {
+    cvFlip(image);
+    image->origin = !image->origin;
+  }
 }

-int main(int argc, char *argv[])
+int main(int argc, char* argv[])
 {
-    try {
-        // Output usage message
-        std::string filename(argv[0]);
-        filename = filename.substr(filename.find_last_of('\\') + 1);
-        std::cout << "SampleCamCalib" << std::endl;
-        std::cout << "==============" << std::endl;
-        std::cout << std::endl;
-        std::cout << "Description:" << std::endl;
-        std::cout << "  This is an example of how to use the 'Camera' and 'ProjPoints' classes" << std::endl;
-        std::cout << "  to perform camera calibration. Point the camera to the chessboard" << std::endl;
-        std::cout << "  calibration pattern (see ALVAR.pdf) from several directions until 50" << std::endl;
-        std::cout << "  calibration images are collected. A 'calib.xml' file that contains the" << std::endl;
-        std::cout << "  internal parameters of the camera is generated and can be used by other" << std::endl;
-        std::cout << "  applications that require a calibrated camera." << std::endl;
-        std::cout << std::endl;
-        std::cout << "Usage:" << std::endl;
-        std::cout << "  " << filename << " [device]" << std::endl;
-        std::cout << std::endl;
-        std::cout << "    device    integer selecting device from enumeration list (default 0)" << std::endl;
-        std::cout << "              highgui capture devices are prefered" << std::endl;
-        std::cout << std::endl;
-        std::cout << "Keyboard Shortcuts:" << std::endl;
-        std::cout << "  q: quit" << std::endl;
-        std::cout << std::endl;
-
-        // Initialise CvTestbed
-        CvTestbed::Instance().SetVideoCallback(videocallback);
-
-        // Enumerate possible capture plugins
-        CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins();
-        if (plugins.size() < 1) {
-            std::cout << "Could not find any capture plugins."
<< std::endl; - return 0; - } - - // Display capture plugins - std::cout << "Available Plugins: "; - outputEnumeratedPlugins(plugins); - std::cout << std::endl; + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleCamCalib" << std::endl; + std::cout << "==============" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to use the 'Camera' and " + "'ProjPoints' classes" + << std::endl; + std::cout << " to perform camera calibration. Point the camera to the " + "chessboard" + << std::endl; + std::cout << " calibration pattern (see ALVAR.pdf) from several " + "directions until 50" + << std::endl; + std::cout << " calibration images are collected. A 'calib.xml' file that " + "contains the" + << std::endl; + std::cout << " internal parameters of the camera is generated and can be " + "used by other" + << std::endl; + std::cout << " applications that require a calibrated camera." + << std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " [device]" << std::endl; + std::cout << std::endl; + std::cout << " device integer selecting device from enumeration list " + "(default 0)" + << std::endl; + std::cout << " highgui capture devices are prefered" + << std::endl; + std::cout << std::endl; + std::cout << "Keyboard Shortcuts:" << std::endl; + std::cout << " q: quit" << std::endl; + std::cout << std::endl; + + // Initialise CvTestbed + CvTestbed::Instance().SetVideoCallback(videocallback); + + // Enumerate possible capture plugins + CaptureFactory::CapturePluginVector plugins = + CaptureFactory::instance()->enumeratePlugins(); + if (plugins.size() < 1) + { + std::cout << "Could not find any capture plugins." << std::endl; + return 0; + } - // Enumerate possible capture devices - CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices(); - if (devices.size() < 1) { - std::cout << "Could not find any capture devices." << std::endl; - return 0; - } + // Display capture plugins + std::cout << "Available Plugins: "; + outputEnumeratedPlugins(plugins); + std::cout << std::endl; + + // Enumerate possible capture devices + CaptureFactory::CaptureDeviceVector devices = + CaptureFactory::instance()->enumerateDevices(); + if (devices.size() < 1) + { + std::cout << "Could not find any capture devices." 
<< std::endl; + return 0; + } - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); - - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } - - std::stringstream title; - title << "SampleCamCalib (" << cap->captureDevice().captureType() << ")"; - - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } - - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." << std::endl; - } + // Check command line argument for which device to use + int selectedDevice = defaultDevice(devices); + if (argc > 1) + { + selectedDevice = atoi(argv[1]); + } + if (selectedDevice >= (int)devices.size()) + { + selectedDevice = defaultDevice(devices); + } - return 0; + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; + + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); + + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; + + cap->start(); + cap->setResolution(640, 480); + + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleCamCalib (" << cap->captureDevice().captureType() << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) 
+ { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleCvTestbed.cpp b/ar_track_alvar/src/SampleCvTestbed.cpp index f915dce..c88c7b7 100644 --- a/ar_track_alvar/src/SampleCvTestbed.cpp +++ b/ar_track_alvar/src/SampleCvTestbed.cpp @@ -3,128 +3,163 @@ using namespace alvar; using namespace std; -void videocallback(IplImage *image) +void videocallback(IplImage* image) { - static IplImage *img_gray=NULL; + static IplImage* img_gray = NULL; + + assert(image); + if (img_gray == NULL) + { + // Following image is toggled visible using key '0' + img_gray = + CvTestbed::Instance().CreateImageWithProto("Grayscale", image, 0, 1); + } + if (image->nChannels > 1) + { + cvCvtColor(image, img_gray, CV_RGB2GRAY); + } + else + { + cvCopy(image, img_gray); + } + // TODO: Do your image operations +} - assert(image); - if (img_gray == NULL) { - // Following image is toggled visible using key '0' - img_gray = CvTestbed::Instance().CreateImageWithProto("Grayscale", image, 0, 1); +int main(int argc, char* argv[]) +{ + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleCvTestbed" << std::endl; + std::cout << "===============" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to use the 'CvTestbed', " + "'CaptureFactory' and" + << std::endl; + std::cout << " 'Capture' classes. The 'CaptureFactory' can create " + "'Capture' objects" + << std::endl; + std::cout << " from many different backends (see SampleCvTestbed.cpp). " + "You can also" + << std::endl; + std::cout << " show/hide the 1st ten images created using 'CvTestbed' " + "using the number" + << std::endl; + std::cout << " keys. In this example you can use key '0' to show/hide a " + "grayscale" + << std::endl; + std::cout << " version of the captured image." << std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " [device]" << std::endl; + std::cout << std::endl; + std::cout << " device integer selecting device from enumeration list " + "(default 0)" + << std::endl; + std::cout << " highgui capture devices are prefered" + << std::endl; + std::cout << std::endl; + std::cout << "Keyboard Shortcuts:" << std::endl; + std::cout << " 0: show/hide grayscale image" << std::endl; + std::cout << " q: quit" << std::endl; + std::cout << std::endl; + + // Initialise CvTestbed + CvTestbed::Instance().SetVideoCallback(videocallback); + + // Enumerate possible capture plugins + CaptureFactory::CapturePluginVector plugins = + CaptureFactory::instance()->enumeratePlugins(); + if (plugins.size() < 1) + { + std::cout << "Could not find any capture plugins." << std::endl; + return 0; } - if (image->nChannels > 1) { - cvCvtColor(image, img_gray, CV_RGB2GRAY); - } else { - cvCopy(image, img_gray); + + // Display capture plugins + std::cout << "Available Plugins: "; + outputEnumeratedPlugins(plugins); + std::cout << std::endl; + + // Enumerate possible capture devices + CaptureFactory::CaptureDeviceVector devices = + CaptureFactory::instance()->enumerateDevices(); + if (devices.size() < 1) + { + std::cout << "Could not find any capture devices." 
<< std::endl; + return 0; } - // TODO: Do your image operations -} -int main(int argc, char *argv[]) -{ - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SampleCvTestbed" << std::endl; - std::cout << "===============" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This is an example of how to use the 'CvTestbed', 'CaptureFactory' and" << std::endl; - std::cout << " 'Capture' classes. The 'CaptureFactory' can create 'Capture' objects" << std::endl; - std::cout << " from many different backends (see SampleCvTestbed.cpp). You can also" << std::endl; - std::cout << " show/hide the 1st ten images created using 'CvTestbed' using the number" << std::endl; - std::cout << " keys. In this example you can use key '0' to show/hide a grayscale" << std::endl; - std::cout << " version of the captured image." << std::endl; - std::cout << std::endl; - std::cout << "Usage:" << std::endl; - std::cout << " " << filename << " [device]" << std::endl; - std::cout << std::endl; - std::cout << " device integer selecting device from enumeration list (default 0)" << std::endl; - std::cout << " highgui capture devices are prefered" << std::endl; - std::cout << std::endl; - std::cout << "Keyboard Shortcuts:" << std::endl; - std::cout << " 0: show/hide grayscale image" << std::endl; - std::cout << " q: quit" << std::endl; - std::cout << std::endl; - - // Initialise CvTestbed - CvTestbed::Instance().SetVideoCallback(videocallback); - - // Enumerate possible capture plugins - CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins(); - if (plugins.size() < 1) { - std::cout << "Could not find any capture plugins." << std::endl; - return 0; - } - - // Display capture plugins - std::cout << "Available Plugins: "; - outputEnumeratedPlugins(plugins); - std::cout << std::endl; - - // Enumerate possible capture devices - CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices(); - if (devices.size() < 1) { - std::cout << "Could not find any capture devices." 
<< std::endl; - return 0; - } - - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); - - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } - - std::stringstream title; - title << "SampleCvTestbed (" << cap->captureDevice().captureType() << ")"; - - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } - - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." << std::endl; - } - - return 0; + // Check command line argument for which device to use + int selectedDevice = defaultDevice(devices); + if (argc > 1) + { + selectedDevice = atoi(argv[1]); } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + if (selectedDevice >= (int)devices.size()) + { + selectedDevice = defaultDevice(devices); } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; + + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); + + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + + cap->start(); + cap->setResolution(640, 480); + + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleCvTestbed (" << cap->captureDevice().captureType() << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; + } + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; + } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) 
+ { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleFilter.cpp b/ar_track_alvar/src/SampleFilter.cpp index c575540..fd99d96 100644 --- a/ar_track_alvar/src/SampleFilter.cpp +++ b/ar_track_alvar/src/SampleFilter.cpp @@ -10,246 +10,289 @@ using namespace alvar; using namespace std; -const int res=320; +const int res = 320; -void filter_none(double x, double y, double *fx, double *fy) { - *fx = x; *fy = y; +void filter_none(double x, double y, double* fx, double* fy) +{ + *fx = x; + *fy = y; } -void filter_average(double x, double y, double *fx, double *fy) { - static FilterAverage ax(30); - static FilterAverage ay(30); - *fx = ax.next(x); - *fy = ay.next(y); +void filter_average(double x, double y, double* fx, double* fy) +{ + static FilterAverage ax(30); + static FilterAverage ay(30); + *fx = ax.next(x); + *fy = ay.next(y); } -void filter_median(double x, double y, double *fx, double *fy) { - static FilterMedian ax(30); - static FilterMedian ay(30); - *fx = ax.next(x); - *fy = ay.next(y); +void filter_median(double x, double y, double* fx, double* fy) +{ + static FilterMedian ax(30); + static FilterMedian ay(30); + *fx = ax.next(x); + *fy = ay.next(y); } -void filter_running_average(double x, double y, double *fx, double *fy) { - static FilterRunningAverage ax(0.03); - static FilterRunningAverage ay(0.03); - *fx = ax.next(x); - *fy = ay.next(y); +void filter_running_average(double x, double y, double* fx, double* fy) +{ + static FilterRunningAverage ax(0.03); + static FilterRunningAverage ay(0.03); + *fx = ax.next(x); + *fy = ay.next(y); } -void filter_des(double x, double y, double *fx, double *fy) { - static FilterDoubleExponentialSmoothing ax(0.03,0.01); - static FilterDoubleExponentialSmoothing ay(0.03,0.01); - *fx = ax.next(x); - *fy = ay.next(y); +void filter_des(double x, double y, double* fx, double* fy) +{ + static FilterDoubleExponentialSmoothing ax(0.03, 0.01); + static FilterDoubleExponentialSmoothing ay(0.03, 0.01); + *fx = ax.next(x); + *fy = ay.next(y); } -void filter_kalman(double x, double y, double *fx, double *fy) { - static bool init=true; - static KalmanSensor sensor(4,2); - static Kalman kalman(4); // x, y, dx, dy - if (init) { - init = false; - // H - cvZero(sensor.H); - cvmSet(sensor.H, 0, 0, 1); - cvmSet(sensor.H, 1, 1, 1); - // R - cvSetIdentity(sensor.R, cvScalar(10)); - // F - cvSetIdentity(kalman.F); - cvmSet(kalman.F, 0, 2, 1); - cvmSet(kalman.F, 1, 3, 1); - // Q - cvmSet(kalman.Q, 0, 0, 0.0001); - cvmSet(kalman.Q, 1, 1, 0.0001); - cvmSet(kalman.Q, 2, 2, 0.000001); - cvmSet(kalman.Q, 3, 3, 0.000001); - // P - cvSetIdentity(kalman.P, cvScalar(100)); - } - cvmSet(sensor.z, 0, 0, x); - cvmSet(sensor.z, 1, 0, y); - kalman.predict_update(&sensor, (unsigned long)(cv::getTickCount() / cv::getTickFrequency() * 1000)); - *fx = cvmGet(kalman.x, 0, 0); - *fy = cvmGet(kalman.x, 1, 0); +void filter_kalman(double x, double y, double* fx, double* fy) +{ + static bool init = true; + static KalmanSensor sensor(4, 2); + static Kalman kalman(4); // x, y, dx, dy + if (init) + { + init = false; + // H + cvZero(sensor.H); + cvmSet(sensor.H, 0, 0, 1); + cvmSet(sensor.H, 1, 1, 1); + // R + cvSetIdentity(sensor.R, cvScalar(10)); + // F + cvSetIdentity(kalman.F); + cvmSet(kalman.F, 0, 2, 1); + cvmSet(kalman.F, 1, 3, 1); + // Q + cvmSet(kalman.Q, 0, 0, 0.0001); + cvmSet(kalman.Q, 1, 1, 0.0001); + cvmSet(kalman.Q, 2, 2, 0.000001); + cvmSet(kalman.Q, 3, 3, 0.000001); + // P + cvSetIdentity(kalman.P, cvScalar(100)); + } + cvmSet(sensor.z, 0, 0, 
x); + cvmSet(sensor.z, 1, 0, y); + kalman.predict_update( + &sensor, + (unsigned long)(cv::getTickCount() / cv::getTickFrequency() * 1000)); + *fx = cvmGet(kalman.x, 0, 0); + *fy = cvmGet(kalman.x, 1, 0); } -void filter_array_average(double x, double y, double *fx, double *fy) { - static bool init=true; - static FilterArray fa(2); - if (init) { - init=false; - for (int i=0; i<2; i++) { - fa[i].setWindowSize(30); - } +void filter_array_average(double x, double y, double* fx, double* fy) +{ + static bool init = true; + static FilterArray fa(2); + if (init) + { + init = false; + for (int i = 0; i < 2; i++) + { + fa[i].setWindowSize(30); } - *fx = fa[0].next(x); - *fy = fa[1].next(y); + } + *fx = fa[0].next(x); + *fy = fa[1].next(y); } -class KalmanSensorOwn : public KalmanSensorEkf { - virtual void h(CvMat *x_pred, CvMat *_z_pred) { - double x = cvmGet(x_pred, 0, 0); - double y = cvmGet(x_pred, 1, 0); - double dx = cvmGet(x_pred, 2, 0); - double dy = cvmGet(x_pred, 3, 0); - cvmSet(_z_pred, 0, 0, x); - cvmSet(_z_pred, 1, 0, y); - } +class KalmanSensorOwn : public KalmanSensorEkf +{ + virtual void h(CvMat* x_pred, CvMat* _z_pred) + { + double x = cvmGet(x_pred, 0, 0); + double y = cvmGet(x_pred, 1, 0); + double dx = cvmGet(x_pred, 2, 0); + double dy = cvmGet(x_pred, 3, 0); + cvmSet(_z_pred, 0, 0, x); + cvmSet(_z_pred, 1, 0, y); + } + public: - KalmanSensorOwn(int _n, int _m) : KalmanSensorEkf(_n, _m) {} + KalmanSensorOwn(int _n, int _m) : KalmanSensorEkf(_n, _m) + { + } }; -class KalmanOwn : public KalmanEkf { - virtual void f(CvMat *_x, CvMat *_x_pred, double dt) { - double x = cvmGet(_x, 0, 0); - double y = cvmGet(_x, 1, 0); - double dx = cvmGet(_x, 2, 0); - double dy = cvmGet(_x, 3, 0); - cvmSet(_x_pred, 0, 0, x + dt*dx); - cvmSet(_x_pred, 1, 0, y + dt*dy); - cvmSet(_x_pred, 2, 0, dx); - cvmSet(_x_pred, 3, 0, dy); - } +class KalmanOwn : public KalmanEkf +{ + virtual void f(CvMat* _x, CvMat* _x_pred, double dt) + { + double x = cvmGet(_x, 0, 0); + double y = cvmGet(_x, 1, 0); + double dx = cvmGet(_x, 2, 0); + double dy = cvmGet(_x, 3, 0); + cvmSet(_x_pred, 0, 0, x + dt * dx); + cvmSet(_x_pred, 1, 0, y + dt * dy); + cvmSet(_x_pred, 2, 0, dx); + cvmSet(_x_pred, 3, 0, dy); + } + public: - KalmanOwn(int _n) : KalmanEkf(_n) {} + KalmanOwn(int _n) : KalmanEkf(_n) + { + } }; -void filter_ekf(double x, double y, double *fx, double *fy) { - static bool init=true; - static KalmanSensorOwn sensor(4,2); - static KalmanOwn kalman(4); // x, y, dx, dy - if (init) { - init = false; - // R - cvSetIdentity(sensor.R, cvScalar(100)); - // Q - cvmSet(kalman.Q, 0, 0, 0.001); - cvmSet(kalman.Q, 1, 1, 0.001); - cvmSet(kalman.Q, 2, 2, 0.01); - cvmSet(kalman.Q, 3, 3, 0.01); - // P - cvSetIdentity(kalman.P, cvScalar(100)); - } - cvmSet(sensor.z, 0, 0, x); - cvmSet(sensor.z, 1, 0, y); - kalman.predict_update(&sensor, (unsigned long)(cv::getTickCount() / cv::getTickFrequency() * 1000)); - *fx = cvmGet(kalman.x, 0, 0); - *fy = cvmGet(kalman.x, 1, 0); +void filter_ekf(double x, double y, double* fx, double* fy) +{ + static bool init = true; + static KalmanSensorOwn sensor(4, 2); + static KalmanOwn kalman(4); // x, y, dx, dy + if (init) + { + init = false; + // R + cvSetIdentity(sensor.R, cvScalar(100)); + // Q + cvmSet(kalman.Q, 0, 0, 0.001); + cvmSet(kalman.Q, 1, 1, 0.001); + cvmSet(kalman.Q, 2, 2, 0.01); + cvmSet(kalman.Q, 3, 3, 0.01); + // P + cvSetIdentity(kalman.P, cvScalar(100)); + } + cvmSet(sensor.z, 0, 0, x); + cvmSet(sensor.z, 1, 0, y); + kalman.predict_update( + &sensor, + (unsigned long)(cv::getTickCount() / 
cv::getTickFrequency() * 1000)); + *fx = cvmGet(kalman.x, 0, 0); + *fy = cvmGet(kalman.x, 1, 0); } -//Make list of filters +// Make list of filters const int nof_filters = 8; -void (*(filters[nof_filters]))(double x, double y, double *fx, double *fy) = { - filter_none, - filter_average, - filter_median, - filter_running_average, - filter_des, - filter_kalman, - filter_ekf, - filter_array_average, -}; -char filter_names[nof_filters][64]={ - "No filter - Press any key to change", - "Average", - "Median", - "Running Average", - "Double Exponential Smoothing", - "Kalman", - "Extended Kalman", - "Array (average)" +void (*(filters[nof_filters]))(double x, double y, double* fx, double* fy) = { + filter_none, filter_average, filter_median, filter_running_average, + filter_des, filter_kalman, filter_ekf, filter_array_average, }; +char filter_names[nof_filters][64] = { "No filter - Press any key to change", + "Average", + "Median", + "Running Average", + "Double Exponential Smoothing", + "Kalman", + "Extended Kalman", + "Array (average)" }; // Just generate some random data that can be used as sensor input -void get_measurement(double *x, double *y) { - static double xx=0; - static double yy=0; - static double dxx = 0.3; - static double dyy = 0.7; - xx += dxx; yy += dyy; - if ((xx > res) || (xx < 0)) dxx = -dxx; - if ((yy > res) || (yy < 0)) dyy = -dyy; - double rx = (rand()*20.0/RAND_MAX)-10.0; - double ry = (rand()*20.0/RAND_MAX)-10.0; - - // Add some outliers - if(fabs(rx*ry)>50) - { - rx *= 5; - ry *= 5; - } +void get_measurement(double* x, double* y) +{ + static double xx = 0; + static double yy = 0; + static double dxx = 0.3; + static double dyy = 0.7; + xx += dxx; + yy += dyy; + if ((xx > res) || (xx < 0)) + dxx = -dxx; + if ((yy > res) || (yy < 0)) + dyy = -dyy; + double rx = (rand() * 20.0 / RAND_MAX) - 10.0; + double ry = (rand() * 20.0 / RAND_MAX) - 10.0; + + // Add some outliers + if (fabs(rx * ry) > 50) + { + rx *= 5; + ry *= 5; + } - *x = xx + rx; *y = yy + ry; + *x = xx + rx; + *y = yy + ry; } -int main(int argc, char *argv[]) +int main(int argc, char* argv[]) { - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SampleFilter" << std::endl; - std::cout << "============" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This is an example of how to use the 'FilterAverage', 'FilterMedian'," << std::endl; - std::cout << " 'FilterRunningAverage', 'FilterDoubleExponentialSmoothing', 'Kalman'" << std::endl; - std::cout << " 'KalmanEkf' and 'FilterArray' filtering classes. First the example" << std::endl; - std::cout << " shows unfiltered test data with outliers. The data is then filtered" << std::endl; - std::cout << " using the various filters. Press any key to cycle through the filters." 
<< std::endl;
-        std::cout << std::endl;
-        std::cout << "Usage:" << std::endl;
-        std::cout << "  " << filename << std::endl;
-        std::cout << std::endl;
-        std::cout << "Keyboard Shortcuts:" << std::endl;
-        std::cout << "  any key: cycle through filters" << std::endl;
-        std::cout << "  q: quit" << std::endl;
-        std::cout << std::endl;
-
-        // Processing loop
-        IplImage *img = cvCreateImage(cvSize(res, res), IPL_DEPTH_8U, 3);
-        cvNamedWindow("SampleFilter");
-        CvFont font;
-        cvInitFont(&font, CV_FONT_HERSHEY_PLAIN, 1.0, 1.0);
-        for (int ii = 0; ii < nof_filters; ii++) {
-            int key = 0;
-            double x, y;
-            double fx, fy;
-            vector<CvPoint> tail;
-            while (1) {
-                get_measurement(&x, &y);
-                filters[ii](x, y, &fx, &fy);
-                cvZero(img);
-                cvPutText(img, filter_names[ii], cvPoint(3, res - 10), &font, CV_RGB(255, 255, 255));
-                cvCircle(img, cvPoint(int(x), int(y)), 2, CV_RGB(0, 255, 255));
-                cvCircle(img, cvPoint(int(x), int(y)), 3, CV_RGB(255, 255, 255));
-                CvPoint fp;
-                fp.x = int(fx);
-                fp.y = int(fy);
-                tail.push_back(fp);
-                for (size_t iii = 0; iii < tail.size(); iii++) {
-                    cvCircle(img, tail[iii], 0, CV_RGB(255, 255, 0));
-                }
-                cvCircle(img, fp, 2, CV_RGB(255, 0, 255));
-                cvShowImage("SampleFilter", img);
-                key = cvWaitKey(10);
-                if (key != -1) {
-                    break;
-                }
-            }
-            if (key == 'q') {
-                break;
-            }
-        }
-        cvReleaseImage(&img);
-        return 0;
-    }
-    catch (const std::exception &e) {
-        std::cout << "Exception: " << e.what() << endl;
-    }
-    catch (...) {
-        std::cout << "Exception: unknown" << std::endl;
-    }
+    std::cout << std::endl;
+    std::cout << "Usage:" << std::endl;
+    std::cout << "  " << filename << std::endl;
+    std::cout << std::endl;
+    std::cout << "Keyboard Shortcuts:" << std::endl;
+    std::cout << "  any key: cycle through filters" << std::endl;
+    std::cout << "  q: quit" << std::endl;
+    std::cout << std::endl;
+
+    // Processing loop
+    IplImage* img = cvCreateImage(cvSize(res, res), IPL_DEPTH_8U, 3);
+    cvNamedWindow("SampleFilter");
+    CvFont font;
+    cvInitFont(&font, CV_FONT_HERSHEY_PLAIN, 1.0, 1.0);
+    for (int ii = 0; ii < nof_filters; ii++)
+    {
+      int key = 0;
+      double x, y;
+      double fx, fy;
+      vector<CvPoint> tail;
+      while (1)
+      {
+        get_measurement(&x, &y);
+        filters[ii](x, y, &fx, &fy);
+        cvZero(img);
+        cvPutText(img, filter_names[ii], cvPoint(3, res - 10), &font,
+                  CV_RGB(255, 255, 255));
+        cvCircle(img, cvPoint(int(x), int(y)), 2, CV_RGB(0, 255, 255));
+        cvCircle(img, cvPoint(int(x), int(y)), 3, CV_RGB(255, 255, 255));
+        CvPoint fp;
+        fp.x = int(fx);
+        fp.y = int(fy);
+        tail.push_back(fp);
+        for (size_t iii = 0; iii < tail.size(); iii++)
+        {
+          cvCircle(img, tail[iii], 0, CV_RGB(255, 255, 0));
        }
+        cvCircle(img, fp, 2, CV_RGB(255, 0, 255));
+        cvShowImage("SampleFilter", img);
+        key = cvWaitKey(10);
+        if (key != -1)
+        {
+          break;
+        }
+      }
+      if (key == 'q')
+      {
+        break;
+      }
    }
+    cvReleaseImage(&img);
+    return 0;
+  }
+  catch (const std::exception& e)
+  {
+    std::cout << "Exception: " << e.what() << endl;
+  }
+  catch (...)
{ - std::cout << "Exception: unknown" << std::endl; + cvCircle(img, fp, 2, CV_RGB(255, 0, 255)); + cvShowImage("SampleFilter", img); + key = cvWaitKey(10); + if (key != -1) + { + break; + } + } + if (key == 'q') + { + break; + } } + cvReleaseImage(&img); + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleIntegralImage.cpp b/ar_track_alvar/src/SampleIntegralImage.cpp index 2a88625..ec8b2fd 100644 --- a/ar_track_alvar/src/SampleIntegralImage.cpp +++ b/ar_track_alvar/src/SampleIntegralImage.cpp @@ -4,207 +4,257 @@ using namespace alvar; using namespace std; -void videocallback(IplImage *image) +void videocallback(IplImage* image) { - static IplImage *img_grad = NULL; - static IplImage *img_gray = NULL; - static IplImage *img_ver = NULL; - static IplImage *img_hor = NULL; - static IplImage *img_canny = NULL; - static IntegralImage integ; - static IntegralGradient grad; - - assert(image); - if (img_gray == NULL) { - // Following image is toggled visible using key '0' - img_grad = CvTestbed::Instance().CreateImageWithProto("Gradient", image); - CvTestbed::Instance().ToggleImageVisible(0); - img_gray = CvTestbed::Instance().CreateImageWithProto("Grayscale", image, 0, 1); - img_ver = CvTestbed::Instance().CreateImage("Vertical", cvSize(1,image->height), IPL_DEPTH_8U, 1); - img_hor = CvTestbed::Instance().CreateImage("Horizontal", cvSize(image->width,1), IPL_DEPTH_8U, 1); - img_canny = CvTestbed::Instance().CreateImageWithProto("Canny", image, 0, 1); - img_canny->origin = img_ver->origin = img_hor->origin = image->origin; - } - if (image->nChannels > 1) { - cvCvtColor(image, img_gray, CV_RGB2GRAY); - } else { - cvCopy(image, img_gray); + static IplImage* img_grad = NULL; + static IplImage* img_gray = NULL; + static IplImage* img_ver = NULL; + static IplImage* img_hor = NULL; + static IplImage* img_canny = NULL; + static IntegralImage integ; + static IntegralGradient grad; + + assert(image); + if (img_gray == NULL) + { + // Following image is toggled visible using key '0' + img_grad = CvTestbed::Instance().CreateImageWithProto("Gradient", image); + CvTestbed::Instance().ToggleImageVisible(0); + img_gray = + CvTestbed::Instance().CreateImageWithProto("Grayscale", image, 0, 1); + img_ver = CvTestbed::Instance().CreateImage( + "Vertical", cvSize(1, image->height), IPL_DEPTH_8U, 1); + img_hor = CvTestbed::Instance().CreateImage( + "Horizontal", cvSize(image->width, 1), IPL_DEPTH_8U, 1); + img_canny = + CvTestbed::Instance().CreateImageWithProto("Canny", image, 0, 1); + img_canny->origin = img_ver->origin = img_hor->origin = image->origin; + } + if (image->nChannels > 1) + { + cvCvtColor(image, img_gray, CV_RGB2GRAY); + } + else + { + cvCopy(image, img_gray); + } + + // Show PerformanceTimer + // PerformanceTimer timer; + // timer.Start(); + + // Update the integral images + integ.Update(img_gray); + grad.Update(img_gray); + + // Whole image projections + integ.GetSubimage(cvRect(0, 0, image->width, image->height), img_ver); + integ.GetSubimage(cvRect(0, 0, image->width, image->height), img_hor); + for (int y = 1; y < image->height; y++) + { + cvLine(image, cvPoint(int(cvGet2D(img_ver, y - 1, 0).val[0]), y - 1), + cvPoint(int(cvGet2D(img_ver, y, 0).val[0]), y), CV_RGB(255, 0, 0)); + } + for (int x = 1; x < image->width; x++) + { + cvLine(image, cvPoint(x - 1, int(cvGet2D(img_hor, 0, x - 1).val[0])), + cvPoint(x, int(cvGet2D(img_hor, 0, 
x).val[0])), CV_RGB(0, 255, 0));
+  }
+
+  // Gradients
+  // Mark gradients for 4x4 sub-blocks
+  /*
+  cvZero(img_grad);
+  CvRect r = {0,0,4,4};
+  for (int y=0; y<image->height/4; y++) {
+    r.y = y*4;
+    for (int x=0; x<image->width/4; x++) {
+      r.x = x*4;
+      double dirx, diry;
+      grad.GetAveGradient(r, &dirx, &diry);
+      cvLine(img_grad, cvPoint(r.x+2,r.y+2),
+             cvPoint(r.x+2+int(dirx),r.y+2+int(diry)), CV_RGB(255,0,0));
+    }
+  }
+  */
+
+  // Gradients on canny
+  cvZero(img_grad);
+  static int t1 = 64, t2 = 192;
+  cvCreateTrackbar("t1", "Gradient", &t1, 255, NULL);
+  cvCreateTrackbar("t2", "Gradient", &t2, 255, NULL);
+  cvCanny(img_gray, img_canny, t1, t2);
+  CvRect r = { 0, 0, 4, 4 };
+  for (r.y = 0; r.y < img_canny->height - 4; r.y++)
+  {
+    for (r.x = 0; r.x < img_canny->width - 4; r.x++)
+    {
+      if (img_canny->imageData[r.y * img_canny->widthStep + r.x])
+      {
+        double dirx, diry;
+        grad.GetAveGradient(r, &dirx, &diry);
+        cvLine(img_grad, cvPoint(r.x + 2, r.y + 2),
+               cvPoint(r.x + 2 + int(dirx), r.y + 2 + int(diry)),
+               CV_RGB(0, 0, 255));
+        cvLine(img_grad, cvPoint(r.x + 2, r.y + 2),
+               cvPoint(r.x + 2 + int(-diry), r.y + 2 + int(+dirx)),
+               CV_RGB(255, 0, 0));
+        cvLine(img_grad, cvPoint(r.x + 2, r.y + 2),
+               cvPoint(r.x + 2 + int(+diry), r.y + 2 + int(-dirx)),
+               CV_RGB(255, 0, 0));
+      }
    }
  }
+
+  // Show PerformanceTimer
+  // cout<<"Processing: "<<1.0 / timer.Stop()<<" fps"<<endl;
+}

+int main(int argc, char* argv[])
+{
+  try
+  {
+    // Initialise CvTestbed
+    CvTestbed::Instance().SetVideoCallback(videocallback);
+
-    // Update the integral images
-    integ.Update(img_gray);
-    grad.Update(img_gray);
-
-    // Whole image projections
-    integ.GetSubimage(cvRect(0,0,image->width,image->height), img_ver);
-    integ.GetSubimage(cvRect(0,0,image->width,image->height), img_hor);
-    for (int y=1; y<image->height; y++) {
-        cvLine(image,
-               cvPoint(int(cvGet2D(img_ver, y-1, 0).val[0]), y-1),
-               cvPoint(int(cvGet2D(img_ver, y, 0).val[0]), y),
-               CV_RGB(255,0,0));
+    // Enumerate possible capture plugins
+    CaptureFactory::CapturePluginVector plugins =
+        CaptureFactory::instance()->enumeratePlugins();
+    if (plugins.size() < 1)
+    {
+      std::cout << "Could not find any capture plugins." << std::endl;
+      return 0;
    }
-    for (int x=1; x<image->width; x++) {
-        cvLine(image,
-               cvPoint(x-1, int(cvGet2D(img_hor, 0, x-1).val[0])),
-               cvPoint(x, int(cvGet2D(img_hor, 0, x).val[0])),
-               CV_RGB(0,255,0));
+    // Display capture plugins
+    std::cout << "Available Plugins: ";
+    outputEnumeratedPlugins(plugins);
+    std::cout << std::endl;
+
+    // Enumerate possible capture devices
+    CaptureFactory::CaptureDeviceVector devices =
+        CaptureFactory::instance()->enumerateDevices();
+    if (devices.size() < 1)
+    {
+      std::cout << "Could not find any capture devices."
<< std::endl;
+      return 0;
    }

-    // Gradients
-    // Mark gradients for 4x4 sub-blocks
-    /*
-    cvZero(img_grad);
-    CvRect r = {0,0,4,4};
-    for (int y=0; y<image->height/4; y++) {
-        r.y = y*4;
-        for (int x=0; x<image->width/4; x++) {
-            r.x = x*4;
-            double dirx, diry;
-            grad.GetAveGradient(r, &dirx, &diry);
-            cvLine(img_grad, cvPoint(r.x+2,r.y+2), cvPoint(r.x+2+int(dirx),r.y+2+int(diry)), CV_RGB(255,0,0));
-        }
+    // Check command line argument for which device to use
+    int selectedDevice = defaultDevice(devices);
+    if (argc > 1)
+    {
+      selectedDevice = atoi(argv[1]);
     }
-    */
-
-    // Gradients on canny
-    cvZero(img_grad);
-    static int t1=64, t2=192;
-    cvCreateTrackbar("t1", "Gradient", &t1, 255, NULL);
-    cvCreateTrackbar("t2", "Gradient", &t2, 255, NULL);
-    cvCanny(img_gray, img_canny, t1, t2);
-    CvRect r = {0,0,4,4};
-    for (r.y=0; r.y<img_canny->height-4; r.y++) {
-        for (r.x=0; r.x<img_canny->width-4; r.x++) {
-            if (img_canny->imageData[r.y*img_canny->widthStep+r.x]) {
-                double dirx, diry;
-                grad.GetAveGradient(r, &dirx, &diry);
-                cvLine(img_grad, cvPoint(r.x+2,r.y+2), cvPoint(r.x+2+int(dirx),r.y+2+int(diry)), CV_RGB(0,0,255));
-                cvLine(img_grad, cvPoint(r.x+2,r.y+2), cvPoint(r.x+2+int(-diry),r.y+2+int(+dirx)), CV_RGB(255,0,0));
-                cvLine(img_grad, cvPoint(r.x+2,r.y+2), cvPoint(r.x+2+int(+diry),r.y+2+int(-dirx)), CV_RGB(255,0,0));
-            }
-        }
+    if (selectedDevice >= (int)devices.size())
+    {
+      selectedDevice = defaultDevice(devices);
     }
-
-    // Show PerformanceTimer
-    //cout<<"Processing: "<<1.0 / timer.Stop()<<" fps"<<endl;
-}
-
-int main(int argc, char *argv[])
-{
-    try {
-        // Initialise CvTestbed
-        CvTestbed::Instance().SetVideoCallback(videocallback);
-
-        // Enumerate possible capture plugins
-        CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins();
-        if (plugins.size() < 1) {
-            std::cout << "Could not find any capture plugins." << std::endl;
-            return 0;
-        }
-
-        // Display capture plugins
-        std::cout << "Available Plugins: ";
-        outputEnumeratedPlugins(plugins);
-        std::cout << std::endl;
-
-        // Enumerate possible capture devices
-        CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices();
-        if (devices.size() < 1) {
-            std::cout << "Could not find any capture devices." << std::endl;
-            return 0;
-        }
-
-        // Check command line argument for which device to use
-        int selectedDevice = defaultDevice(devices);
-        if (argc > 1) {
-            selectedDevice = atoi(argv[1]);
-        }
-        if (selectedDevice >= (int)devices.size()) {
-            selectedDevice = defaultDevice(devices);
-        }
-
-        // Display capture devices
-        std::cout << "Enumerated Capture Devices:" << std::endl;
-        outputEnumeratedDevices(devices, selectedDevice);
-        std::cout << std::endl;
-
-        // Create capture object from camera
-        Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]);
-        std::string uniqueName = devices[selectedDevice].uniqueName();
-
-        // Handle capture lifecycle and start video capture
-        // Note that loadSettings/saveSettings are not supported by all plugins
-        if (cap) {
-            std::stringstream settingsFilename;
-            settingsFilename << "camera_settings_" << uniqueName << ".xml";
-
-            cap->start();
-            cap->setResolution(640, 480);
-
-            if (cap->loadSettings(settingsFilename.str())) {
-                std::cout << "Loading settings: " << settingsFilename.str() << std::endl;
-            }
-
-            std::stringstream title;
-            title << "SampleIntegralImage (" << cap->captureDevice().captureType() << ")";
-
-            CvTestbed::Instance().StartVideo(cap, title.str().c_str());
-
-            if (cap->saveSettings(settingsFilename.str())) {
-                std::cout << "Saving settings: " << settingsFilename.str() << std::endl;
-            }
-
-            cap->stop();
-            delete cap;
-        }
-        else if (CvTestbed::Instance().StartVideo(0, argv[0])) {
-        }
-        else {
-            std::cout << "Could not initialize the selected capture backend."
<< std::endl; - } - - return 0; + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); + + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + + cap->start(); + cap->setResolution(640, 480); + + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleIntegralImage (" << cap->captureDevice().captureType() + << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleLabeling.cpp b/ar_track_alvar/src/SampleLabeling.cpp index 9af6eba..3f1e0ad 100644 --- a/ar_track_alvar/src/SampleLabeling.cpp +++ b/ar_track_alvar/src/SampleLabeling.cpp @@ -6,168 +6,200 @@ using namespace std; int thresh_param1 = 31; -void videocallback(IplImage *image) +void videocallback(IplImage* image) { - bool flip_image = (image->origin?true:false); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; + bool flip_image = (image->origin ? true : false); + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } + + static LabelingCvSeq* labeling = 0; + if (!labeling) + { + labeling = new LabelingCvSeq(); + } + + labeling->SetThreshParams(thresh_param1, 5); + + const int min_edge_size = 10; + CvSeq* edges = labeling->LabelImage(image, min_edge_size); + + int n_edges = edges->total; + for (int i = 0; i < n_edges; ++i) + { + CvSeq* pixels = (CvSeq*)cvGetSeqElem(edges, i); + int n_pixels = pixels->total; + for (int j = 0; j < n_pixels; ++j) + { + CvPoint* pt = (CvPoint*)cvGetSeqElem(pixels, j); + cvLine(image, *pt, *pt, CV_RGB(255, 0, 0)); } + } - static LabelingCvSeq* labeling = 0; - if(!labeling) { - labeling = new LabelingCvSeq(); - } + // Visualize now also the square corners. 
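+  // (The boolean flag passed to LabelSquares below asks it to draw the
+  // detected four-corner blobs into the image, complementing the edge
+  // pixels drawn above.)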
+ labeling->LabelSquares(image, true); - labeling->SetThreshParams(thresh_param1, 5); + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } +} - const int min_edge_size = 10; - CvSeq* edges = labeling->LabelImage(image, min_edge_size); +int keycallback(int key) +{ + if (key == '+') + { + thresh_param1 += 2; + std::cout << "Adaptive threshold block size: " << thresh_param1 + << std::endl; + } + else if (key == '-') + { + thresh_param1 -= 2; + if (thresh_param1 < 3) + thresh_param1 = 3; + std::cout << "Adaptive threshold block size: " << thresh_param1 + << std::endl; + } + + else + return key; + + return 0; +} - int n_edges = edges->total; - for(int i = 0; i < n_edges; ++i) +int main(int argc, char* argv[]) +{ + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleLabeling" << std::endl; + std::cout << "==============" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to use the 'LabelingCvSeq' class " + "to perform" + << std::endl; + std::cout << " labeling. Blobs are detected in the image and if the blobs " + "have four" + << std::endl; + std::cout << " corners, the edges between the corners are visualized." + << std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " [device]" << std::endl; + std::cout << std::endl; + std::cout << " device integer selecting device from enumeration list " + "(default 0)" + << std::endl; + std::cout << " highgui capture devices are prefered" + << std::endl; + std::cout << std::endl; + std::cout << "Keyboard Shortcuts:" << std::endl; + std::cout << " +: Increase adaptive threshold block size." << std::endl; + std::cout << " -: Decrease adaptive threshold block size." << std::endl; + std::cout << " q: quit" << std::endl; + std::cout << std::endl; + + // Initialise CvTestbed + CvTestbed::Instance().SetVideoCallback(videocallback); + CvTestbed::Instance().SetKeyCallback(keycallback); + + // Enumerate possible capture plugins + CaptureFactory::CapturePluginVector plugins = + CaptureFactory::instance()->enumeratePlugins(); + if (plugins.size() < 1) { - CvSeq* pixels = (CvSeq*)cvGetSeqElem(edges, i); - int n_pixels = pixels->total; - for(int j = 0; j < n_pixels; ++j) - { - CvPoint* pt = (CvPoint*)cvGetSeqElem(pixels, j); - cvLine(image, *pt, *pt, CV_RGB(255,0,0)); - } + std::cout << "Could not find any capture plugins." << std::endl; + return 0; } - // Visualize now also the square corners. - labeling->LabelSquares(image, true); + // Display capture plugins + std::cout << "Available Plugins: "; + outputEnumeratedPlugins(plugins); + std::cout << std::endl; - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; + // Enumerate possible capture devices + CaptureFactory::CaptureDeviceVector devices = + CaptureFactory::instance()->enumerateDevices(); + if (devices.size() < 1) + { + std::cout << "Could not find any capture devices." 
<< std::endl;
+      return 0;
    }

-}
-int keycallback(int key)
-{
-    if(key == '+')
+    // Check command line argument for which device to use
+    int selectedDevice = defaultDevice(devices);
+    if (argc > 1)
     {
-        thresh_param1+=2;
-        std::cout<<"Adaptive threshold block size: "<<thresh_param1<<std::endl;
-    }
-    else if(key == '-')
+      selectedDevice = atoi(argv[1]);
     }
+    if (selectedDevice >= (int)devices.size())
     {
-        thresh_param1-=2;
-        if(thresh_param1<3) thresh_param1 = 3;
-        std::cout<<"Adaptive threshold block size: "<<thresh_param1<<std::endl;
-    }
-
-    else
-        return key;
-
-    return 0;
-}
+      selectedDevice = defaultDevice(devices);
    }
+
+    // Display capture devices
+    std::cout << "Enumerated Capture Devices:" << std::endl;
+    outputEnumeratedDevices(devices, selectedDevice);
+    std::cout << std::endl;
+
+    // Create capture object from camera
+    Capture* cap =
+        CaptureFactory::instance()->createCapture(devices[selectedDevice]);
+    std::string uniqueName = devices[selectedDevice].uniqueName();

-int main(int argc, char *argv[])
-{
-    try {
-        // Output usage message
-        std::string filename(argv[0]);
-        filename = filename.substr(filename.find_last_of('\\') + 1);
-        std::cout << "SampleLabeling" << std::endl;
-        std::cout << "==============" << std::endl;
-        std::cout << std::endl;
-        std::cout << "Description:" << std::endl;
-        std::cout << "  This is an example of how to use the 'LabelingCvSeq' class to perform" << std::endl;
-        std::cout << "  labeling. Blobs are detected in the image and if the blobs have four" << std::endl;
-        std::cout << "  corners, the edges between the corners are visualized." << std::endl;
-        std::cout << std::endl;
-        std::cout << "Usage:" << std::endl;
-        std::cout << "  " << filename << " [device]" << std::endl;
-        std::cout << std::endl;
-        std::cout << "    device    integer selecting device from enumeration list (default 0)" << std::endl;
-        std::cout << "              highgui capture devices are prefered" << std::endl;
-        std::cout << std::endl;
-        std::cout << "Keyboard Shortcuts:" << std::endl;
-        std::cout << "  +: Increase adaptive threshold block size." << std::endl;
-        std::cout << "  -: Decrease adaptive threshold block size." << std::endl;
-        std::cout << "  q: quit" << std::endl;
-        std::cout << std::endl;
-
-        // Initialise CvTestbed
-        CvTestbed::Instance().SetVideoCallback(videocallback);
-        CvTestbed::Instance().SetKeyCallback(keycallback);
-
-        // Enumerate possible capture plugins
-        CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins();
-        if (plugins.size() < 1) {
-            std::cout << "Could not find any capture plugins." << std::endl;
-            return 0;
-        }
-
-        // Display capture plugins
-        std::cout << "Available Plugins: ";
-        outputEnumeratedPlugins(plugins);
-        std::cout << std::endl;
-
-        // Enumerate possible capture devices
-        CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices();
-        if (devices.size() < 1) {
-            std::cout << "Could not find any capture devices."
<< std::endl; - return 0; - } - - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); - - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } - - std::stringstream title; - title << "SampleLabeling (" << cap->captureDevice().captureType() << ")"; - - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } - - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." << std::endl; - } - - return 0; + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + + cap->start(); + cap->setResolution(640, 480); + + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleLabeling (" << cap->captureDevice().captureType() << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) 
+  {
+    std::cout << "Exception: unknown" << std::endl;
+  }
}
diff --git a/ar_track_alvar/src/SampleMarkerCreator.cpp b/ar_track_alvar/src/SampleMarkerCreator.cpp
index a773e2a..7d99aac 100644
--- a/ar_track_alvar/src/SampleMarkerCreator.cpp
+++ b/ar_track_alvar/src/SampleMarkerCreator.cpp
@@ -1,324 +1,419 @@
 #include "ar_track_alvar/MultiMarker.h"
-#include 
-#include "opencv2/opencv.hpp"
+#include 
 using namespace std;
 using namespace alvar;

-struct State {
-    cv::Mat *img;
-    stringstream filename;
-    double minx, miny, maxx, maxy; // top-left and bottom-right in pixel units
-    MultiMarker multi_marker;
-
-    // General options
-    bool prompt;
-    double units;           // how many pixels per one unit
-    double marker_side_len; // marker side len in current units
-    int marker_type;        // 0:MarkerData, 1:ArToolkit
-    double posx, posy;      // The position of marker center in the given units
-    int content_res;
-    double margin_res;
-    bool array;
-
-    // MarkerData specific options
-    MarkerData::MarkerContentType marker_data_content_type;
-    bool marker_data_force_strong_hamming;
-
-    State()
-        : img(0),
-          prompt(false),
-          units(96.0/2.54),       // cm assuming 96 dpi
-          marker_side_len(9.0),   // 9 cm
-          marker_type(0),
-          posx(0), posy(0),
-          content_res(0),         // 0 uses default
-          margin_res(0.0),        // 0.0 uses default (can be n*0.5)
-          marker_data_content_type(MarkerData::MARKER_CONTENT_TYPE_NUMBER),
-          marker_data_force_strong_hamming(false)
-    {}
-    ~State() {
-    }
-    void AddMarker(const char *id) {
-        std::cout<<"ADDING MARKER "<<id<<std::endl;
-        MarkerData md(marker_side_len, content_res, margin_res);
-        double new_minx = min(minx, (posx*units)-(marker_side_len*units/2.0));
-        double new_miny = min(miny, (posy*units)-(marker_side_len*units/2.0));
-        double new_maxx = max(maxx, (posx*units)+(marker_side_len*units/2.0));
-        double new_maxy = max(maxy, (posy*units)+(marker_side_len*units/2.0));
-        if (maxx > new_maxx) new_maxx = maxx;
-        if (maxy > new_maxy) new_maxy = maxy;
-
-        //TODO I don't know about all this tbh
-
-        cv::Mat new_img = cv::Mat(cv::Size(int(new_maxx-new_minx+0.5), int(new_maxy-new_miny+0.5)),CV_8UC1);
-        new_img = cv::Scalar(255);
-        // CvRect roi = cvRect(int(minx-new_minx+0.5), int(miny-new_miny+0.5), img->rows, img->cols);
-
-        cv::Rect roi(int(minx-new_minx+0.5), int(miny-new_miny+0.5), img->rows, img->cols);
-        cv::Mat image_roi = new_img(roi);
-
-        CvMat img2 = cvMat(image_roi);
-
-        // cvSetImageROI(new_img, roi);
-        cvCopy(img, &img2);
+struct State
+{
+  cv::Mat img;
+  stringstream filename;
+  double minx, miny, maxx, maxy;  // top-left and bottom-right in pixel units
+  MultiMarker multi_marker;

+  // General options
+  bool prompt;
+  double units;            // how many pixels per one unit
+  double marker_side_len;  // marker side len in current units
+  int marker_type;         // 0:MarkerData, 1:ArToolkit
+  double posx, posy;  // The position of marker center in the given units
+  int content_res;
+  double margin_res;
+  bool array;

-        roi.x = int((posx*units) - (marker_side_len*units/2.0) - new_minx + 0.5);
-        roi.y = int((posy*units) - (marker_side_len*units/2.0) - new_miny + 0.5);
-        roi.width = int(marker_side_len*units+0.5); roi.height = int(marker_side_len*units+0.5);
-
-        cv::Mat im2 = img->clone();
-        cv::Mat image_roi2 = im2(roi);
-        CvMat img3 = cvMat(image_roi2);
+  // MarkerData specific options
+  MarkerData::MarkerContentType marker_data_content_type;
+  bool marker_data_force_strong_hamming;

-        cv::Mat temp_img = cv::cvarrToMat(&img3);
-        img = &temp_img;
-
-        minx = new_minx; miny = new_miny;
-        maxx = new_maxx; maxy = new_maxy;
-    }
-
-    if (marker_data_content_type == MarkerData::MARKER_CONTENT_TYPE_NUMBER) {
-        int idi = atoi(id);
-        md.SetContent(marker_data_content_type, idi, 0);
-        if (filename.str().length()<64) filename<<"_"<<idi;
+  void AddMarker(const char* id)
+  {
+    std::cout << "ADDING MARKER " << id << std::endl;
+    MarkerData md(marker_side_len, content_res, margin_res);
+    if (!img.empty())
+    {
+      double new_minx = min(minx, (posx * units) - (marker_side_len * units / 2.0));
+      double new_miny = min(miny, (posy * units) - (marker_side_len * units / 2.0));
+      double new_maxx = max(maxx, (posx * units) + (marker_side_len * units / 2.0));
+      double new_maxy = max(maxy, (posy * units) + (marker_side_len * units / 2.0));
+      if (maxx > new_maxx)
+        new_maxx = maxx;
+      if (maxy > new_maxy)
+        new_maxy = maxy;
+      cv::Mat new_img = cv::Mat(cv::Size(int(new_maxx - new_minx + 0.5),
+                                         int(new_maxy - new_miny + 0.5)),
+                                CV_8UC1, cv::Scalar(255));
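+      // The marker being added falls outside the current canvas: the
+      // cv::Mat constructed above is a larger all-white canvas; the copy
+      // and adjustROI calls below blit the old image into it and then
+      // point the ROI at the new marker's cell (this replaces the old
+      // cvSetImageROI/cvCopy-based logic removed above).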
cv::Scalar(255)); + img.copyTo( + new_img(cv::Rect(int(minx - new_minx + 0.5), + int(miny - new_miny + 0.5), img.cols, img.rows))); + img.release(); + img = new_img; + int roi_x = int((posx * units) - (marker_side_len * units / 2.0) - + new_minx + 0.5); + int roi_y = int((posy * units) - (marker_side_len * units / 2.0) - + new_miny + 0.5); + int roi_t = -roi_y; + int roi_b = -img.rows + int(marker_side_len * units + 0.5) + roi_y; + int roi_l = -roi_x; + int roi_r = -img.cols + int(marker_side_len * units + 0.5) + roi_x; + img.adjustROI(roi_t, roi_b, roi_l, roi_r); + minx = new_minx; + miny = new_miny; + maxx = new_maxx; + maxy = new_maxy; + } + if (marker_data_content_type == MarkerData::MARKER_CONTENT_TYPE_NUMBER) + { + int idi = atoi(id); + md.SetContent(marker_data_content_type, idi, 0); + if (filename.str().length() < 64) + filename << "_" << idi; - Pose pose; - pose.Reset(); - pose.SetTranslation(posx, -posy, 0); - multi_marker.PointCloudAdd(idi, marker_side_len, pose); - } - else - { - md.SetContent(marker_data_content_type, 0, id); - const char *p = id; - int counter=0; - filename<<"_"; - while(*p) { - if (!isalnum(*p)) filename<<"_"; - else filename<<(char)tolower(*p); - p++; counter++; - if (counter > 8) break; - } - } - md.ScaleMarkerToImage(img); - } - else if (marker_type == 1) { - // Create and save MarkerArtoolkit marker (Note, that this doesn't support multi markers) - MarkerArtoolkit md(marker_side_len, content_res, margin_res); - int side_len = int(marker_side_len*units+0.5); - cv::Mat temp_img = cv::Mat(cv::Size(side_len, side_len),CV_8UC1); - img = &temp_img; - filename.str(""); - filename<<"MarkerArtoolkit"; - md.SetContent(atoi(id)); - filename<<"_"< 8) + break; } + } + md.ScaleMarkerToImage(img); + // reset ROI + cv::Size roi_size; + cv::Point roi_offset; + img.locateROI(roi_size, roi_offset); + img.adjustROI(roi_offset.y, roi_size.height - img.rows, roi_offset.x, + roi_size.width - img.cols); } - void Save() { - if (img) { - std::stringstream filenamexml; - filenamexml< 1) { - std::cout<<"Saving: "< 1) + { + std::cout << "Saving: " << filenamexml.str() << std::endl; + multi_marker.Save(filenamexml.str().c_str(), alvar::FILE_FORMAT_XML); + } } + } } st; -int main(int argc, char *argv[]) +int main(int argc, char* argv[]) { - try { - if (argc < 2) st.prompt = true; - for (int i=1; i" << std::endl; - std::cout << " -s 5.0 use marker size 5.0x5.0 units (default 9.0x9.0)" << std::endl; - std::cout << " -r 5 marker content resolution -- 0 uses default" << std::endl; - std::cout << " -m 2.0 marker margin resolution -- 0 uses default" << std::endl; - std::cout << " -a use ArToolkit style matrix markers" << std::endl; - std::cout << " -p prompt marker placements interactively from the user" << std::endl; - std::cout << std::endl; + // Output usage message + if (st.prompt) + { + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleMarkerCreator" << std::endl; + std::cout << "===================" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to use the 'MarkerData' and " + "'MarkerArtoolkit'" + << std::endl; + std::cout << " classes to generate marker images. This application can " + "be used to" + << std::endl; + std::cout << " generate markers and multimarker setups that can be used " + "with" + << std::endl; + std::cout << " SampleMarkerDetector and SampleMultiMarker." 
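// Aside: the AddMarker() port above replaces the old IplImage ROI calls with
// cv::Mat::locateROI/adjustROI. A minimal standalone sketch of that idiom
// (buffer name and sizes are hypothetical, not from this patch):
//
//   cv::Mat full(480, 640, CV_8UC1, cv::Scalar(255));
//   cv::Mat view = full(cv::Rect(100, 50, 64, 64));  // narrow to a 64x64 window
//   cv::Size whole;
//   cv::Point ofs;
//   view.locateROI(whole, ofs);                      // whole = 640x480, ofs = (100,50)
//   view.adjustROI(ofs.y, whole.height - ofs.y - view.rows,
//                  ofs.x, whole.width - ofs.x - view.cols);  // grow back to full
//
// Both headers share one pixel buffer, so no copy is made; note that the full
// reset subtracts the offset on the bottom and right sides as well.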
<< std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " [options] argument" << std::endl; + std::cout << std::endl; + std::cout << " 65535 marker with number 65535" + << std::endl; + std::cout << " -f 65535 force hamming(8,4) encoding" + << std::endl; + std::cout << " -1 \"hello world\" marker with string" << std::endl; + std::cout << " -2 catalog.xml marker with file reference" + << std::endl; + std::cout << " -3 www.vtt.fi marker with URL" << std::endl; + std::cout << " -u 96 use units corresponding to 1.0 unit " + "per 96 pixels" + << std::endl; + std::cout << " -uin use inches as units (assuming 96 dpi)" + << std::endl; + std::cout << " -ucm use cm's as units (assuming 96 dpi) " + "" + << std::endl; + std::cout << " -s 5.0 use marker size 5.0x5.0 units " + "(default 9.0x9.0)" + << std::endl; + std::cout << " -r 5 marker content resolution -- 0 uses " + "default" + << std::endl; + std::cout << " -m 2.0 marker margin resolution -- 0 uses " + "default" + << std::endl; + std::cout << " -a use ArToolkit style matrix markers" + << std::endl; + std::cout << " -p prompt marker placements " + "interactively from the user" + << std::endl; + std::cout << std::endl; - // Interactive stuff here - if (st.prompt) { - st.marker_type = 0; - st.marker_data_content_type = MarkerData::MARKER_CONTENT_TYPE_NUMBER; - std::cout<<"\nPrompt marker placements interactively"< 0) marker_id=atoi(s.c_str()); - if (marker_id < 0) break; - std::cout<<" x position (in current units) ["< 0) posx=atof(s.c_str()); - std::cout<<" y position (in current units) ["< 0) posy=atof(s.c_str()); - st.posx=posx; st.posy=posy; - std::stringstream ss; - ss< 0) + marker_id = atoi(s.c_str()); + if (marker_id < 0) + break; + std::cout << " x position (in current units) [" << posx << "]: "; + std::flush(std::cout); + std::getline(std::cin, s); + if (s.length() > 0) + posx = atof(s.c_str()); + std::cout << " y position (in current units) [" << posy << "]: "; + std::flush(std::cout); + std::getline(std::cin, s); + if (s.length() > 0) + posy = atof(s.c_str()); + st.posx = posx; + st.posy = posy; + std::stringstream ss; + ss << marker_id; + st.AddMarker(ss.str().c_str()); - // Guess suitable marker_id and placement for the next marker - marker_id++; - if (posx <= 0) { - posx = int(sqrt(double(marker_id)))*st.marker_side_len*2; - posy = 0; - vert = true; - } else if (vert) { - posy += (st.marker_side_len*2); - if (posy >= posx) vert = false; - } else { - posx -= (st.marker_side_len*2); - } - } - } + // Guess suitable marker_id and placement for the next marker + marker_id++; + if (posx <= 0) + { + posx = int(sqrt(double(marker_id))) * st.marker_side_len * 2; + posy = 0; + vert = true; + } + else if (vert) + { + posy += (st.marker_side_len * 2); + if (posy >= posx) + vert = false; + } + else + { + posx -= (st.marker_side_len * 2); + } } - else if(st.array) { - st.marker_type = 0; - st.marker_data_content_type = MarkerData::MARKER_CONTENT_TYPE_NUMBER; - int rows = 0; - int cols = 0; - double rows_distance = 0; - double cols_distance = 0; - bool column_major = 1; - int first_id = 0; - int marker_id = 0; - std::string s; - std::cout<<"\nPrompt marker placements interactively"< 0) rows=atoi(s.c_str()); - std::cout<<"Array cols : "; std::flush(std::cout); - std::getline(std::cin, s); if (s.length() > 0) cols=atoi(s.c_str()); - std::cout<<"Rows distance : "; std::flush(std::cout); - std::getline(std::cin, s); if (s.length() > 0) rows_distance=atof(s.c_str()); - std::cout<<"Cols distance : "; 
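// Aside: in array mode (the loops just below) the marker ID is a pure
// function of the grid position; condensed from that code:
//
//   // row-major: consecutive IDs fill a row before moving to the next row
//   marker_id = first_id + row * cols + col;
//   // column-major: consecutive IDs fill a column before moving right
//   marker_id = first_id + col * rows + row;
//
// with the marker centre placed at
//   st.posx = col * cols_distance;  st.posy = row * rows_distance;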
std::flush(std::cout); - std::getline(std::cin, s); if (s.length() > 0) cols_distance=atof(s.c_str()); - std::cout<<"Column major (1 or 0) : "; std::flush(std::cout); - std::getline(std::cin, s); if (s.length() > 0) column_major=atoi(s.c_str()); - std::cout<<"First marker ID : "; std::flush(std::cout); - std::getline(std::cin, s); if (s.length() > 0) first_id=atoi(s.c_str()); - if(!column_major) { - for(int row = 0; row 0) + rows = atoi(s.c_str()); + std::cout << "Array cols : "; + std::flush(std::cout); + std::getline(std::cin, s); + if (s.length() > 0) + cols = atoi(s.c_str()); + std::cout << "Rows distance : "; + std::flush(std::cout); + std::getline(std::cin, s); + if (s.length() > 0) + rows_distance = atof(s.c_str()); + std::cout << "Cols distance : "; + std::flush(std::cout); + std::getline(std::cin, s); + if (s.length() > 0) + cols_distance = atof(s.c_str()); + std::cout << "Column major (1 or 0) : "; + std::flush(std::cout); + std::getline(std::cin, s); + if (s.length() > 0) + column_major = atoi(s.c_str()); + std::cout << "First marker ID : "; + std::flush(std::cout); + std::getline(std::cin, s); + if (s.length() > 0) + first_id = atoi(s.c_str()); + if (!column_major) + { + for (int row = 0; row < rows; row++) + for (int col = 0; col < cols; col++) + { + st.posy = row * rows_distance; + st.posx = col * cols_distance; + marker_id = first_id + row * cols + col; + std::stringstream ss; + ss << marker_id; + st.AddMarker(ss.str().c_str()); + } + } + else + { + for (int col = 0; col < cols; col++) + for (int row = 0; row < rows; row++) + { + st.posy = row * rows_distance; + st.posx = col * cols_distance; + marker_id = first_id + col * rows + row; + std::stringstream ss; + ss << marker_id; + st.AddMarker(ss.str().c_str()); + } + } } + st.Save(); + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleMarkerDetector.cpp b/ar_track_alvar/src/SampleMarkerDetector.cpp index 64297aa..961925e 100644 --- a/ar_track_alvar/src/SampleMarkerDetector.cpp +++ b/ar_track_alvar/src/SampleMarkerDetector.cpp @@ -5,195 +5,255 @@ using namespace alvar; using namespace std; -bool init=true; -const int marker_size=15; +bool init = true; +const int marker_size = 15; Camera cam; Drawable d[32]; std::stringstream calibrationFilename; -void videocallback(IplImage *image) +void videocallback(IplImage* image) { - static IplImage *rgba; - bool flip_image = (image->origin?true:false); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; - } + static IplImage* rgba; + bool flip_image = (image->origin ? 
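// Aside: every videocallback in these samples brackets its work with the
// same origin normalization seen here: bottom-up frames are flipped to
// top-down before detection and flipped back before display. Condensed:
//
//   bool flip_image = (image->origin ? true : false);
//   if (flip_image) { cvFlip(image); image->origin = !image->origin; }
//   // ... detect markers and draw overlays on a top-down image ...
//   if (flip_image) { cvFlip(image); image->origin = !image->origin; }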
true : false); + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } - if (init) { - init = false; - cout<<"Loading calibration: "<width, image->height)) { - cout<<" [Ok]"<width, image->height); - cout<<" [Fail]"<width,image->height); - GlutViewer::SetGlProjectionMatrix(p); - for (int i=0; i<32; i++) { - d[i].SetScale(marker_size); - } - rgba = CvTestbed::Instance().CreateImageWithProto("RGBA", image, 0, 4); + if (init) + { + init = false; + cout << "Loading calibration: " << calibrationFilename.str(); + if (cam.SetCalib(calibrationFilename.str().c_str(), image->width, + image->height)) + { + cout << " [Ok]" << endl; } - static MarkerDetector marker_detector; - marker_detector.SetMarkerSize(marker_size); // for marker ids larger than 255, set the content resolution accordingly - //static MarkerDetector marker_detector; - //marker_detector.SetMarkerSize(2.8, 3, 1.5); - - // Here we try to use RGBA just to make sure that also it works... - //cvCvtColor(image, rgba, CV_RGB2RGBA); - marker_detector.Detect(image, &cam, true, true); - GlutViewer::DrawableClear(); - for (size_t i=0; isize(); i++) { - if (i >= 32) break; - - Pose p = (*(marker_detector.markers))[i].pose; - p.GetMatrixGL(d[i].gl_mat); - - int id = (*(marker_detector.markers))[i].GetId(); - double r = 1.0 - double(id+1)/32.0; - double g = 1.0 - double(id*3%32+1)/32.0; - double b = 1.0 - double(id*7%32+1)/32.0; - d[i].SetColor(r, g, b); - - GlutViewer::DrawableAdd(&(d[i])); + else + { + cam.SetRes(image->width, image->height); + cout << " [Fail]" << endl; } - - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; + double p[16]; + cam.GetOpenglProjectionMatrix(p, image->width, image->height); + GlutViewer::SetGlProjectionMatrix(p); + for (int i = 0; i < 32; i++) + { + d[i].SetScale(marker_size); } + rgba = CvTestbed::Instance().CreateImageWithProto("RGBA", image, 0, 4); + } + static MarkerDetector marker_detector; + marker_detector.SetMarkerSize(marker_size); // for marker ids larger than + // 255, set the content + // resolution accordingly + // static MarkerDetector marker_detector; + // marker_detector.SetMarkerSize(2.8, 3, 1.5); + + // Here we try to use RGBA just to make sure that also it works... + // cvCvtColor(image, rgba, CV_RGB2RGBA); + marker_detector.Detect(image, &cam, true, true); + GlutViewer::DrawableClear(); + for (size_t i = 0; i < marker_detector.markers->size(); i++) + { + if (i >= 32) + break; + + Pose p = (*(marker_detector.markers))[i].pose; + p.GetMatrixGL(d[i].gl_mat); + + int id = (*(marker_detector.markers))[i].GetId(); + double r = 1.0 - double(id + 1) / 32.0; + double g = 1.0 - double(id * 3 % 32 + 1) / 32.0; + double b = 1.0 - double(id * 7 % 32 + 1) / 32.0; + d[i].SetColor(r, g, b); + + GlutViewer::DrawableAdd(&(d[i])); + } + + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } } -int main(int argc, char *argv[]) +int main(int argc, char* argv[]) { - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SampleMarkerDetector" << std::endl; - std::cout << "====================" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This is an example of how to detect 'MarkerData' markers using" << std::endl; - std::cout << " 'MarkerDetector' and visualize them using 'GlutViewer'. 
In the" << std::endl; - std::cout << " SampleMarkerDetector window, various debug information is shown" << std::endl; - std::cout << " about the detected markers. The coordinate axes and a virtual cube" << std::endl; - std::cout << " are superimposed onto the markers to visualize the detected pose." << std::endl; - std::cout << " For each marker, a small image of the marker content is displayed" << std::endl; - std::cout << " at the origin and the marker number is displayed at one of the" << std::endl; - std::cout << " corners. At the opposing corner, the error estimation percentages" << std::endl; - std::cout << " 'MARGIN_ERROR' and 'DECODE_ERROR' (red) or 'TRACK_ERROR' (dark red)" << std::endl; - std::cout << " are displayed." << std::endl; - std::cout << std::endl; - std::cout << " In the AR window, squares are drawn over the marker positions using" << std::endl; - std::cout << " OpenGL. In the VR window, the squares are drawn with respect to the" << std::endl; - std::cout << " camera coordinate frame. The viewpoint can be modified by dragging" << std::endl; - std::cout << " with the left and right mouse buttons." << std::endl; - std::cout << std::endl; - std::cout << "Usage:" << std::endl; - std::cout << " " << filename << " [device|filename]" << std::endl; - std::cout << std::endl; - std::cout << " device integer selecting device from enumeration list (default 0)" << std::endl; - std::cout << " highgui capture devices are prefered" << std::endl; - std::cout << " filename string specifying a media file as input" << std::endl; - std::cout << std::endl; - std::cout << "Keyboard Shortcuts:" << std::endl; - std::cout << " q: quit" << std::endl; - std::cout << std::endl; - - // Initialise GlutViewer and CvTestbed - GlutViewer::Start(argc, argv, 640, 480); - CvTestbed::Instance().SetVideoCallback(videocallback); - - // Create capture object from camera (argv[1] is a number) or from file (argv[1] is a string) - Capture *cap; - std::string uniqueName; - if ((argc > 1) && (!isdigit(argv[1][0]))) { - // Manually create capture device and initialize capture object - CaptureDevice device("file", argv[1]); - cap = CaptureFactory::instance()->createCapture(device); - uniqueName = "file"; - } - else { - // Enumerate possible capture plugins - CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins(); - if (plugins.size() < 1) { - std::cout << "Could not find any capture plugins." << std::endl; - return 0; - } - - // Display capture plugins - std::cout << "Available Plugins: "; - outputEnumeratedPlugins(plugins); - std::cout << std::endl; - - // Enumerate possible capture devices - CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices(); - if (devices.size() < 1) { - std::cout << "Could not find any capture devices." 
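// Aside: calibration loading in videocallback above is best-effort; when no
// calibration file is found the camera falls back to the bare resolution and
// the OpenGL projection is still derived from it. Condensed sketch, with w/h
// standing in for image->width/image->height:
//
//   if (!cam.SetCalib(calibrationFilename.str().c_str(), w, h))
//     cam.SetRes(w, h);                       // uncalibrated fallback
//   double p[16];
//   cam.GetOpenglProjectionMatrix(p, w, h);
//   GlutViewer::SetGlProjectionMatrix(p);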
<< std::endl; - return 0; - } - - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - uniqueName = devices[selectedDevice].uniqueName(); - } - - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } - - std::stringstream title; - title << "SampleMarkerDetector (" << cap->captureDevice().captureType() << ")"; - - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } - - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." << std::endl; - } + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleMarkerDetector" << std::endl; + std::cout << "====================" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to detect 'MarkerData' markers " + "using" + << std::endl; + std::cout << " 'MarkerDetector' and visualize them using 'GlutViewer'. In " + "the" + << std::endl; + std::cout << " SampleMarkerDetector window, various debug information is " + "shown" + << std::endl; + std::cout << " about the detected markers. The coordinate axes and a " + "virtual cube" + << std::endl; + std::cout << " are superimposed onto the markers to visualize the " + "detected pose." + << std::endl; + std::cout << " For each marker, a small image of the marker content is " + "displayed" + << std::endl; + std::cout << " at the origin and the marker number is displayed at one of " + "the" + << std::endl; + std::cout << " corners. At the opposing corner, the error estimation " + "percentages" + << std::endl; + std::cout << " 'MARGIN_ERROR' and 'DECODE_ERROR' (red) or 'TRACK_ERROR' " + "(dark red)" + << std::endl; + std::cout << " are displayed." << std::endl; + std::cout << std::endl; + std::cout << " In the AR window, squares are drawn over the marker " + "positions using" + << std::endl; + std::cout << " OpenGL. In the VR window, the squares are drawn with " + "respect to the" + << std::endl; + std::cout << " camera coordinate frame. The viewpoint can be modified by " + "dragging" + << std::endl; + std::cout << " with the left and right mouse buttons." 
<< std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " [device|filename]" << std::endl; + std::cout << std::endl; + std::cout << " device integer selecting device from enumeration list " + "(default 0)" + << std::endl; + std::cout << " highgui capture devices are prefered" + << std::endl; + std::cout << " filename string specifying a media file as input" + << std::endl; + std::cout << std::endl; + std::cout << "Keyboard Shortcuts:" << std::endl; + std::cout << " q: quit" << std::endl; + std::cout << std::endl; + + // Initialise GlutViewer and CvTestbed + GlutViewer::Start(argc, argv, 640, 480); + CvTestbed::Instance().SetVideoCallback(videocallback); + + // Create capture object from camera (argv[1] is a number) or from file + // (argv[1] is a string) + Capture* cap; + std::string uniqueName; + if ((argc > 1) && (!isdigit(argv[1][0]))) + { + // Manually create capture device and initialize capture object + CaptureDevice device("file", argv[1]); + cap = CaptureFactory::instance()->createCapture(device); + uniqueName = "file"; + } + else + { + // Enumerate possible capture plugins + CaptureFactory::CapturePluginVector plugins = + CaptureFactory::instance()->enumeratePlugins(); + if (plugins.size() < 1) + { + std::cout << "Could not find any capture plugins." << std::endl; + return 0; + } + + // Display capture plugins + std::cout << "Available Plugins: "; + outputEnumeratedPlugins(plugins); + std::cout << std::endl; + // Enumerate possible capture devices + CaptureFactory::CaptureDeviceVector devices = + CaptureFactory::instance()->enumerateDevices(); + if (devices.size() < 1) + { + std::cout << "Could not find any capture devices." << std::endl; return 0; + } + + // Check command line argument for which device to use + int selectedDevice = defaultDevice(devices); + if (argc > 1) + { + selectedDevice = atoi(argv[1]); + } + if (selectedDevice >= (int)devices.size()) + { + selectedDevice = defaultDevice(devices); + } + + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; + + // Create capture object from camera + cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); + uniqueName = devices[selectedDevice].uniqueName(); + } + + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; + + cap->start(); + cap->setResolution(640, 480); + + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleMarkerDetector (" << cap->captureDevice().captureType() + << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." 
+ << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleMarkerHide.cpp b/ar_track_alvar/src/SampleMarkerHide.cpp index e833af4..8f5cd55 100644 --- a/ar_track_alvar/src/SampleMarkerHide.cpp +++ b/ar_track_alvar/src/SampleMarkerHide.cpp @@ -5,223 +5,269 @@ using namespace alvar; using namespace std; -#define GLUT_DISABLE_ATEXIT_HACK // Needed to compile with Mingw? +#define GLUT_DISABLE_ATEXIT_HACK // Needed to compile with Mingw? #include const double margin = 1.0; std::stringstream calibrationFilename; // Own drawable for showing hide-texture in OpenGL -struct OwnDrawable : public Drawable { - unsigned char hidingtex[64*64*4]; - virtual void Draw() { - glPushMatrix(); - glMultMatrixd(gl_mat); - - glPushAttrib(GL_ALL_ATTRIB_BITS); - glEnable(GL_TEXTURE_2D); - int tex=0; - glBindTexture(GL_TEXTURE_2D, tex); - glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP); - glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,64,64,0,GL_RGBA,GL_UNSIGNED_BYTE,hidingtex); - glDisable(GL_CULL_FACE); - glDisable(GL_LIGHTING); - glDisable(GL_DEPTH_TEST); - glEnable(GL_ALPHA_TEST); - glEnable(GL_BLEND); - glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); - glBegin(GL_QUADS); - glTexCoord2d(0.0,0.0); - glVertex3d(-margin,-margin,0); - glTexCoord2d(0.0,1.0); - glVertex3d(-margin,margin,0); - glTexCoord2d(1.0,1.0); - glVertex3d(margin,margin,0); - glTexCoord2d(1.0,0.0); - glVertex3d(margin,-margin,0); - glEnd(); - glPopAttrib(); - glPopMatrix(); - } +struct OwnDrawable : public Drawable +{ + unsigned char hidingtex[64 * 64 * 4]; + virtual void Draw() + { + glPushMatrix(); + glMultMatrixd(gl_mat); + + glPushAttrib(GL_ALL_ATTRIB_BITS); + glEnable(GL_TEXTURE_2D); + int tex = 0; + glBindTexture(GL_TEXTURE_2D, tex); + glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 64, 64, 0, GL_RGBA, + GL_UNSIGNED_BYTE, hidingtex); + glDisable(GL_CULL_FACE); + glDisable(GL_LIGHTING); + glDisable(GL_DEPTH_TEST); + glEnable(GL_ALPHA_TEST); + glEnable(GL_BLEND); + glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); + glBegin(GL_QUADS); + glTexCoord2d(0.0, 0.0); + glVertex3d(-margin, -margin, 0); + glTexCoord2d(0.0, 1.0); + glVertex3d(-margin, margin, 0); + glTexCoord2d(1.0, 1.0); + glVertex3d(margin, margin, 0); + glTexCoord2d(1.0, 0.0); + glVertex3d(margin, -margin, 0); + glEnd(); + glPopAttrib(); + glPopMatrix(); + } }; -void videocallback(IplImage *image) +void videocallback(IplImage* image) { - static bool init=true; - static const int marker_size=15; - static Camera cam; - static OwnDrawable d[32]; - static IplImage *hide_texture; - - bool flip_image = (image->origin?true:false); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; + static bool init = true; + static const int marker_size = 15; + static Camera cam; + static OwnDrawable d[32]; + static IplImage* 
hide_texture; + + bool flip_image = (image->origin ? true : false); + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } + + static IplImage* bg_image = 0; + if (!bg_image) + bg_image = cvCreateImage(cvSize(512, 512), 8, 3); + if (image->nChannels == 3) + { + bg_image->origin = 0; + cvResize(image, bg_image); + GlutViewer::SetVideo(bg_image); + } + + if (init) + { + init = false; + cout << "Loading calibration: " << calibrationFilename.str(); + if (cam.SetCalib(calibrationFilename.str().c_str(), image->width, + image->height)) + { + cout << " [Ok]" << endl; + } + else + { + cam.SetRes(image->width, image->height); + cout << " [Fail]" << endl; } + double p[16]; + cam.GetOpenglProjectionMatrix(p, image->width, image->height); + GlutViewer::SetGlProjectionMatrix(p); + hide_texture = + CvTestbed::Instance().CreateImage("hide_texture", cvSize(64, 64), 8, 4); + } + static MarkerDetector marker_detector; + marker_detector.Detect(image, &cam, false, false); - static IplImage* bg_image = 0; - if(!bg_image) bg_image = cvCreateImage(cvSize(512, 512), 8, 3); - if(image->nChannels == 3) + GlutViewer::DrawableClear(); + for (size_t i = 0; i < marker_detector.markers->size(); i++) + { + if (i >= 32) + break; + GlutViewer::DrawableAdd(&(d[i])); + } + for (size_t i = 0; i < marker_detector.markers->size(); i++) + { + if (i >= 32) + break; + + // Note that we need to mirror both the y- and z-axis because: + // - In OpenCV we have coordinates: x-right, y-down, z-ahead + // - In OpenGL we have coordinates: x-right, y-up, z-backwards + // TODO: Better option might be to use OpenGL projection matrix that + // matches our OpenCV-approach + Pose p = (*(marker_detector.markers))[i].pose; + BuildHideTexture(image, hide_texture, &cam, d[i].gl_mat, + PointDouble(-margin, -margin), + PointDouble(margin, margin)); + // DrawTexture(image, hide_texture, &cam, d[i].gl_mat, PointDouble(-0.7, + // -0.7), PointDouble(0.7, 0.7)); + + p.GetMatrixGL(d[i].gl_mat); + for (int ii = 0; ii < 64 * 64; ii++) { - bg_image->origin = 0; - cvResize(image, bg_image); - GlutViewer::SetVideo(bg_image); + d[i].hidingtex[ii * 4 + 0] = hide_texture->imageData[ii * 4 + 2]; + d[i].hidingtex[ii * 4 + 1] = hide_texture->imageData[ii * 4 + 1]; + d[i].hidingtex[ii * 4 + 2] = hide_texture->imageData[ii * 4 + 0]; + d[i].hidingtex[ii * 4 + 3] = hide_texture->imageData[ii * 4 + 3]; } + } + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } +} + +int main(int argc, char* argv[]) +{ + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleMarkerHide" << std::endl; + std::cout << "================" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to detect 'MarkerData' markers, " + "similarly" + << std::endl; + std::cout << " to 'SampleMarkerDetector', and hide them using the " + "'BuildHideTexture'" + << std::endl; + std::cout << " and 'DrawTexture' classes." 
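// Aside: the per-pixel loop above swaps the first and third channels, i.e.
// converts the BGRA bytes produced by BuildHideTexture into the RGBA order
// OpenGL expects. With cv::Mat headers wrapping (not copying) the same two
// 64x64 buffers, the swizzle could be a single call; a sketch under that
// assumption:
//
//   cv::Mat bgra(64, 64, CV_8UC4, hide_texture->imageData);
//   cv::Mat rgba(64, 64, CV_8UC4, d[i].hidingtex);
//   cv::cvtColor(bgra, rgba, cv::COLOR_BGRA2RGBA);  // writes into hidingtex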
<< std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " [device]" << std::endl; + std::cout << std::endl; + std::cout << " device integer selecting device from enumeration list " + "(default 0)" + << std::endl; + std::cout << " highgui capture devices are prefered" + << std::endl; + std::cout << std::endl; + std::cout << "Keyboard Shortcuts:" << std::endl; + std::cout << " q: quit" << std::endl; + std::cout << std::endl; - if (init) { - init = false; - cout<<"Loading calibration: "<width, image->height)) { - cout<<" [Ok]"<width, image->height); - cout<<" [Fail]"<width,image->height); - GlutViewer::SetGlProjectionMatrix(p); - hide_texture = CvTestbed::Instance().CreateImage("hide_texture", cvSize(64, 64), 8, 4); + // Initialise GlutViewer and CvTestbed + GlutViewer::Start(argc, argv, 640, 480, 15); + CvTestbed::Instance().SetVideoCallback(videocallback); + + // Enumerate possible capture plugins + CaptureFactory::CapturePluginVector plugins = + CaptureFactory::instance()->enumeratePlugins(); + if (plugins.size() < 1) + { + std::cout << "Could not find any capture plugins." << std::endl; + return 0; } - static MarkerDetector marker_detector;\ - marker_detector.Detect(image, &cam, false, false); - GlutViewer::DrawableClear(); - for (size_t i=0; isize(); i++) { - if (i >= 32) break; - GlutViewer::DrawableAdd(&(d[i])); + // Display capture plugins + std::cout << "Available Plugins: "; + outputEnumeratedPlugins(plugins); + std::cout << std::endl; + + // Enumerate possible capture devices + CaptureFactory::CaptureDeviceVector devices = + CaptureFactory::instance()->enumerateDevices(); + if (devices.size() < 1) + { + std::cout << "Could not find any capture devices." << std::endl; + return 0; } - for (size_t i=0; isize(); i++) { - if (i >= 32) break; - - // Note that we need to mirror both the y- and z-axis because: - // - In OpenCV we have coordinates: x-right, y-down, z-ahead - // - In OpenGL we have coordinates: x-right, y-up, z-backwards - // TODO: Better option might be to use OpenGL projection matrix that matches our OpenCV-approach - Pose p = (*(marker_detector.markers))[i].pose; - BuildHideTexture(image, hide_texture, &cam, d[i].gl_mat, PointDouble(-margin, -margin), PointDouble(margin, margin)); - //DrawTexture(image, hide_texture, &cam, d[i].gl_mat, PointDouble(-0.7, -0.7), PointDouble(0.7, 0.7)); - - p.GetMatrixGL(d[i].gl_mat); - for (int ii=0; ii<64*64; ii++) { - d[i].hidingtex[ii*4+0] = hide_texture->imageData[ii*4+2]; - d[i].hidingtex[ii*4+1] = hide_texture->imageData[ii*4+1]; - d[i].hidingtex[ii*4+2] = hide_texture->imageData[ii*4+0]; - d[i].hidingtex[ii*4+3] = hide_texture->imageData[ii*4+3]; - } + + // Check command line argument for which device to use + int selectedDevice = defaultDevice(devices); + if (argc > 1) + { + selectedDevice = atoi(argv[1]); } - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; + if (selectedDevice >= (int)devices.size()) + { + selectedDevice = defaultDevice(devices); } -} -int main(int argc, char *argv[]) -{ - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SampleMarkerHide" << std::endl; - std::cout << "================" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This is an example of how to detect 'MarkerData' markers, similarly" << std::endl; - std::cout << " to 'SampleMarkerDetector', and hide them using the 
'BuildHideTexture'" << std::endl; - std::cout << " and 'DrawTexture' classes." << std::endl; - std::cout << std::endl; - std::cout << "Usage:" << std::endl; - std::cout << " " << filename << " [device]" << std::endl; - std::cout << std::endl; - std::cout << " device integer selecting device from enumeration list (default 0)" << std::endl; - std::cout << " highgui capture devices are prefered" << std::endl; - std::cout << std::endl; - std::cout << "Keyboard Shortcuts:" << std::endl; - std::cout << " q: quit" << std::endl; - std::cout << std::endl; - - // Initialise GlutViewer and CvTestbed - GlutViewer::Start(argc, argv, 640, 480, 15); - CvTestbed::Instance().SetVideoCallback(videocallback); - - // Enumerate possible capture plugins - CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins(); - if (plugins.size() < 1) { - std::cout << "Could not find any capture plugins." << std::endl; - return 0; - } - - // Display capture plugins - std::cout << "Available Plugins: "; - outputEnumeratedPlugins(plugins); - std::cout << std::endl; - - // Enumerate possible capture devices - CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices(); - if (devices.size() < 1) { - std::cout << "Could not find any capture devices." << std::endl; - return 0; - } - - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); - - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } - - std::stringstream title; - title << "SampleMarkerHide (" << cap->captureDevice().captureType() << ")"; - - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } - - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." 
<< std::endl; - } - - return 0; + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; + + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); + + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; + + cap->start(); + cap->setResolution(640, 480); + + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleMarkerHide (" << cap->captureDevice().captureType() + << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleMarkerlessCreator.cpp b/ar_track_alvar/src/SampleMarkerlessCreator.cpp index fdbb5c9..1040c8c 100644 --- a/ar_track_alvar/src/SampleMarkerlessCreator.cpp +++ b/ar_track_alvar/src/SampleMarkerlessCreator.cpp @@ -2,52 +2,66 @@ using namespace std; using namespace alvar; -int main(int argc, char *argv[]) +int main(int argc, char* argv[]) { - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SampleMarkerlessCreator" << std::endl; - std::cout << "=======================" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This is an example of how to use the 'FernImageDetector' class" << std::endl; - std::cout << " to train a Fern classifier for markerless image-based tracking." << std::endl; - std::cout << " The image should contain many unique features and be in the range" << std::endl; - std::cout << " of 200x200 to 500x500 pixels. A '.dat' file will be saved in the" << std::endl; - std::cout << " same directory as the image and can be used with the" << std::endl; - std::cout << " SampleMarkerlessDetector sample." 
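// Aside: the whole creator reduces to two FernImageDetector calls; condensed
// from the main() below ("pattern.png" is a hypothetical input image):
//
//   FernImageDetector fernDetector(true);
//   fernDetector.train("pattern.png");            // learn features
//   if (!fernDetector.write("pattern.png.dat"))   // persist the classifier
//     return 1;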
<< std::endl; - std::cout << std::endl; - std::cout << "Usage:" << std::endl; - std::cout << " " << filename << " filename" << std::endl; - std::cout << std::endl; - std::cout << " filename filename of image to train" << std::endl; - std::cout << std::endl; + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleMarkerlessCreator" << std::endl; + std::cout << "=======================" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to use the 'FernImageDetector' " + "class" + << std::endl; + std::cout << " to train a Fern classifier for markerless image-based " + "tracking." + << std::endl; + std::cout << " The image should contain many unique features and be in " + "the range" + << std::endl; + std::cout << " of 200x200 to 500x500 pixels. A '.dat' file will be saved " + "in the" + << std::endl; + std::cout << " same directory as the image and can be used with the" + << std::endl; + std::cout << " SampleMarkerlessDetector sample." << std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " filename" << std::endl; + std::cout << std::endl; + std::cout << " filename filename of image to train" << std::endl; + std::cout << std::endl; - if (argc < 2) { - std::cout << "Filename not specified." << std::endl; - return 0; - } - - std::cout << "Training classifier." << std::endl; - FernImageDetector fernDetector(true); - std::string imageFilename(argv[1]); - fernDetector.train(imageFilename); + if (argc < 2) + { + std::cout << "Filename not specified." << std::endl; + return 0; + } - std::cout << "Writing classifier." << std::endl; - std::string classifierFilename = imageFilename + ".dat"; - if (!fernDetector.write(classifierFilename)) { - std::cout << "Writing classifier failed." << std::endl; - return 1; - } + std::cout << "Training classifier." << std::endl; + FernImageDetector fernDetector(true); + std::string imageFilename(argv[1]); + fernDetector.train(imageFilename); - return 0; - } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; - } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + std::cout << "Writing classifier." << std::endl; + std::string classifierFilename = imageFilename + ".dat"; + if (!fernDetector.write(classifierFilename)) + { + std::cout << "Writing classifier failed." << std::endl; + return 1; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleMarkerlessDetector.cpp b/ar_track_alvar/src/SampleMarkerlessDetector.cpp index 1c125f2..dcf4ce3 100644 --- a/ar_track_alvar/src/SampleMarkerlessDetector.cpp +++ b/ar_track_alvar/src/SampleMarkerlessDetector.cpp @@ -15,185 +15,235 @@ Drawable d; cv::Mat gray; bool reset = false; -void videocallback(IplImage *image) +void videocallback(IplImage* image) { - bool flip_image = (image->origin?true:false); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; + bool flip_image = (image->origin ? 
true : false); + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } + + if (init) + { + init = false; + cout << "Loading calibration: " << calibrationFilename.str(); + if (fernEstimator.setCalibration(calibrationFilename.str(), image->width, + image->height)) + { + cout << " [Ok]" << endl; } - - if (init) { - init = false; - cout << "Loading calibration: " << calibrationFilename.str(); - if (fernEstimator.setCalibration(calibrationFilename.str(), image->width, image->height)) { - cout << " [Ok]" << endl; - } else { - fernEstimator.setResolution(image->width, image->height); - cout << " [Fail]" << endl; - } - double p[16]; - fernEstimator.camera().GetOpenglProjectionMatrix(p, image->width, image->height); - GlutViewer::SetGlProjectionMatrix(p); - d.SetScale(10); - gray = cv::Mat(image); + else + { + fernEstimator.setResolution(image->width, image->height); + cout << " [Fail]" << endl; } + double p[16]; + fernEstimator.camera().GetOpenglProjectionMatrix(p, image->width, + image->height); + GlutViewer::SetGlProjectionMatrix(p); + d.SetScale(10); + gray = cv::Mat(image); + } + + if (image->nChannels == 3) + { + cv::Mat img = cvarrToMat(image); + cv::cvtColor(img, gray, CV_RGB2GRAY); + } + else + { + gray = image; + } + + vector ipts; + vector mpts; + + fernDetector.findFeatures(gray, true); + fernDetector.imagePoints(ipts); + fernDetector.modelPoints(mpts, true); + double test = fernDetector.inlierRatio(); + if (test > 0.15 && mpts.size() > 4) + { + fernEstimator.calculateFromPointCorrespondences(mpts, ipts); + } + + GlutViewer::DrawableClear(); + Pose pose = fernEstimator.pose(); + pose.GetMatrixGL(d.gl_mat); + GlutViewer::DrawableAdd(&d); + + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } +} - if (image->nChannels == 3) { - cv::Mat img = cvarrToMat(image); - cv::cvtColor(img, gray, CV_RGB2GRAY); +int main(int argc, char* argv[]) +{ + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleMarkerlessDetector" << std::endl; + std::cout << "========================" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to use the 'FernImageDetector' " + "and" + << std::endl; + std::cout << " 'FernPoseEstimator' classes to detect and track an image " + "and" + << std::endl; + std::cout << " visualize it using 'GlutViewer'. The classification must " + "first" + << std::endl; + std::cout << " be trained with the SampleMarkerlessCreator sample and the" + << std::endl; + std::cout << " resulting file passed as an argument to this sample." + << std::endl; + std::cout << std::endl; + std::cout << " For optimal results, a high quality USB camera or a " + "Firewire" + << std::endl; + std::cout << " camera is necessary. It is also advised to calibrate the " + "camera" + << std::endl; + std::cout << " using the SampleCamCalib sample. It should be noted that " + "the size" + << std::endl; + std::cout << " of the trained image will affect the optimal distance for " + "detection." 
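// Aside: the pose update above is gated on match quality; the estimator runs
// only when more than 15% of the matched features are inliers and more than
// four model points survive. Condensed:
//
//   fernDetector.findFeatures(gray, true);
//   fernDetector.imagePoints(ipts);
//   fernDetector.modelPoints(mpts, true);
//   if (fernDetector.inlierRatio() > 0.15 && mpts.size() > 4)
//     fernEstimator.calculateFromPointCorrespondences(mpts, ipts);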
+ << std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " filename [device]" << std::endl; + std::cout << std::endl; + std::cout << " filename the filename of classifier (.dat)" << std::endl; + std::cout << " device integer selecting device from enumeration list " + "(default 0)" + << std::endl; + std::cout << " highgui capture devices are prefered" + << std::endl; + std::cout << std::endl; + std::cout << "Keyboard Shortcuts:" << std::endl; + std::cout << " q: quit" << std::endl; + std::cout << std::endl; + + if (argc < 2) + { + std::cout << "Filename not specified." << std::endl; + return 0; } - else { - gray = image; + std::string classifierFilename(argv[1]); + + // Initialise GlutViewer and CvTestbed + GlutViewer::Start(argc, argv, 640, 480); + CvTestbed::Instance().SetVideoCallback(videocallback); + + // Enumerate possible capture plugins + CaptureFactory::CapturePluginVector plugins = + CaptureFactory::instance()->enumeratePlugins(); + if (plugins.size() < 1) + { + std::cout << "Could not find any capture plugins." << std::endl; + return 0; } - vector ipts; - vector mpts; - - fernDetector.findFeatures(gray, true); - fernDetector.imagePoints(ipts); - fernDetector.modelPoints(mpts, true); - double test = fernDetector.inlierRatio(); - if (test > 0.15 && mpts.size() > 4) { - fernEstimator.calculateFromPointCorrespondences(mpts, ipts); + // Display capture plugins + std::cout << "Available Plugins: "; + outputEnumeratedPlugins(plugins); + std::cout << std::endl; + + // Enumerate possible capture devices + CaptureFactory::CaptureDeviceVector devices = + CaptureFactory::instance()->enumerateDevices(); + if (devices.size() < 1) + { + std::cout << "Could not find any capture devices." << std::endl; + return 0; } - GlutViewer::DrawableClear(); - Pose pose = fernEstimator.pose(); - pose.GetMatrixGL(d.gl_mat); - GlutViewer::DrawableAdd(&d); - - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; + // Check command line argument for which device to use + int selectedDevice = defaultDevice(devices); + if (argc > 2) + { + selectedDevice = atoi(argv[2]); + } + if (selectedDevice >= (int)devices.size()) + { + selectedDevice = defaultDevice(devices); } -} -int main(int argc, char *argv[]) -{ - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SampleMarkerlessDetector" << std::endl; - std::cout << "========================" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This is an example of how to use the 'FernImageDetector' and" << std::endl; - std::cout << " 'FernPoseEstimator' classes to detect and track an image and" << std::endl; - std::cout << " visualize it using 'GlutViewer'. The classification must first" << std::endl; - std::cout << " be trained with the SampleMarkerlessCreator sample and the" << std::endl; - std::cout << " resulting file passed as an argument to this sample." << std::endl; - std::cout << std::endl; - std::cout << " For optimal results, a high quality USB camera or a Firewire" << std::endl; - std::cout << " camera is necessary. It is also advised to calibrate the camera" << std::endl; - std::cout << " using the SampleCamCalib sample. It should be noted that the size" << std::endl; - std::cout << " of the trained image will affect the optimal distance for detection." 
<< std::endl; - std::cout << std::endl; - std::cout << "Usage:" << std::endl; - std::cout << " " << filename << " filename [device]" << std::endl; - std::cout << std::endl; - std::cout << " filename the filename of classifier (.dat)" << std::endl; - std::cout << " device integer selecting device from enumeration list (default 0)" << std::endl; - std::cout << " highgui capture devices are prefered" << std::endl; - std::cout << std::endl; - std::cout << "Keyboard Shortcuts:" << std::endl; - std::cout << " q: quit" << std::endl; - std::cout << std::endl; - - if (argc < 2) { - std::cout << "Filename not specified." << std::endl; - return 0; - } - std::string classifierFilename(argv[1]); - - // Initialise GlutViewer and CvTestbed - GlutViewer::Start(argc, argv, 640, 480); - CvTestbed::Instance().SetVideoCallback(videocallback); - - // Enumerate possible capture plugins - CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins(); - if (plugins.size() < 1) { - std::cout << "Could not find any capture plugins." << std::endl; - return 0; - } - - // Display capture plugins - std::cout << "Available Plugins: "; - outputEnumeratedPlugins(plugins); - std::cout << std::endl; - - // Enumerate possible capture devices - CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices(); - if (devices.size() < 1) { - std::cout << "Could not find any capture devices." << std::endl; - return 0; - } - - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 2) { - selectedDevice = atoi(argv[2]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); - - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::cout << "Loading classifier." << std::endl; - if (!fernDetector.read(classifierFilename)) { - std::cout << "Loading classifier failed." << std::endl; - cap->stop(); - delete cap; - return 1; - } - - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } - - std::stringstream title; - title << "SampleMarkerDetector (" << cap->captureDevice().captureType() << ")"; - - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } - - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." 
<< std::endl; - } - - return 0; + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; + + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); + + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::cout << "Loading classifier." << std::endl; + if (!fernDetector.read(classifierFilename)) + { + std::cout << "Loading classifier failed." << std::endl; + cap->stop(); + delete cap; + return 1; + } + + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; + + cap->start(); + cap->setResolution(640, 480); + + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleMarkerDetector (" << cap->captureDevice().captureType() + << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleMultiMarker.cpp b/ar_track_alvar/src/SampleMultiMarker.cpp index 16333a9..12ba859 100644 --- a/ar_track_alvar/src/SampleMultiMarker.cpp +++ b/ar_track_alvar/src/SampleMultiMarker.cpp @@ -6,226 +6,261 @@ using namespace alvar; using namespace std; -int visualize=0; +int visualize = 0; bool detect_additional = false; const int nof_markers = 5; const double marker_size = 4; MarkerDetector marker_detector; -//MultiMarker *multi_marker; -MultiMarker *multi_marker; +// MultiMarker *multi_marker; +MultiMarker* multi_marker; std::stringstream calibrationFilename; -void videocallback(IplImage *image) +void videocallback(IplImage* image) { - static Camera cam; - Pose pose; - bool flip_image = (image->origin?true:false); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; - } + static Camera cam; + Pose pose; + bool flip_image = (image->origin ? 
true : false); + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } - static bool init = true; - if (init) - { + static bool init = true; + if (init) + { + init = false; - init = false; - - // Initialize camera - cout<<"Loading calibration: "<width, image->height)) - { - cout<<" [Ok]"<width, image->height); - cout<<" [Fail]"< id_vector; - for(int i = 0; i < nof_markers; ++i) - id_vector.push_back(i); - - // We make the initialization for MultiMarkerBundle using a fixed marker field (can be printed from ALVAR.pdf) - marker_detector.SetMarkerSize(marker_size); - marker_detector.SetMarkerSizeForId(0, marker_size*2); - multi_marker = new MultiMarker(id_vector); - pose.Reset(); - multi_marker->PointCloudAdd(0, marker_size*2, pose); - pose.SetTranslation(-marker_size*2.5, +marker_size*1.5, 0); - multi_marker->PointCloudAdd(1, marker_size, pose); - pose.SetTranslation(+marker_size*2.5, +marker_size*1.5, 0); - multi_marker->PointCloudAdd(2, marker_size, pose); - pose.SetTranslation(-marker_size*2.5, -marker_size*1.5, 0); - multi_marker->PointCloudAdd(3, marker_size, pose); - pose.SetTranslation(+marker_size*2.5, -marker_size*1.5, 0); - multi_marker->PointCloudAdd(4, marker_size, pose); - } + // Initialize camera + cout << "Loading calibration: " << calibrationFilename.str(); - double error=-1; - if (marker_detector.Detect(image, &cam, true, (visualize == 1), 0.0)) { - if (detect_additional) { - error = multi_marker->Update(marker_detector.markers, &cam, pose); - multi_marker->SetTrackMarkers(marker_detector, &cam, pose, visualize ? image : NULL); - marker_detector.DetectAdditional(image, &cam, (visualize == 1)); - } - if (visualize == 2) - error = multi_marker->Update(marker_detector.markers, &cam, pose, image); - else - error = multi_marker->Update(marker_detector.markers, &cam, pose); + if (cam.SetCalib(calibrationFilename.str().c_str(), image->width, + image->height)) + { + cout << " [Ok]" << endl; } - - static Marker foo; - foo.SetMarkerSize(marker_size*4); - if ((error >= 0) && (error < 5)) + else { - foo.pose = pose; + cam.SetRes(image->width, image->height); + cout << " [Fail]" << endl; } - foo.Visualize(image, &cam); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; + vector id_vector; + for (int i = 0; i < nof_markers; ++i) + id_vector.push_back(i); + + // We make the initialization for MultiMarkerBundle using a fixed marker + // field (can be printed from ALVAR.pdf) + marker_detector.SetMarkerSize(marker_size); + marker_detector.SetMarkerSizeForId(0, marker_size * 2); + multi_marker = new MultiMarker(id_vector); + pose.Reset(); + multi_marker->PointCloudAdd(0, marker_size * 2, pose); + pose.SetTranslation(-marker_size * 2.5, +marker_size * 1.5, 0); + multi_marker->PointCloudAdd(1, marker_size, pose); + pose.SetTranslation(+marker_size * 2.5, +marker_size * 1.5, 0); + multi_marker->PointCloudAdd(2, marker_size, pose); + pose.SetTranslation(-marker_size * 2.5, -marker_size * 1.5, 0); + multi_marker->PointCloudAdd(3, marker_size, pose); + pose.SetTranslation(+marker_size * 2.5, -marker_size * 1.5, 0); + multi_marker->PointCloudAdd(4, marker_size, pose); + } + + double error = -1; + if (marker_detector.Detect(image, &cam, true, (visualize == 1), 0.0)) + { + if (detect_additional) + { + error = multi_marker->Update(marker_detector.markers, &cam, pose); + multi_marker->SetTrackMarkers(marker_detector, &cam, pose, + visualize ? 
image : NULL); + marker_detector.DetectAdditional(image, &cam, (visualize == 1)); } + if (visualize == 2) + error = multi_marker->Update(marker_detector.markers, &cam, pose, image); + else + error = multi_marker->Update(marker_detector.markers, &cam, pose); + } + + static Marker foo; + foo.SetMarkerSize(marker_size * 4); + if ((error >= 0) && (error < 5)) + { + foo.pose = pose; + } + foo.Visualize(image, &cam); + + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } } int keycallback(int key) { - if(key == 'v') + if (key == 'v') + { + visualize = (visualize + 1) % 3; + } + else if (key == 'l') + { + if (multi_marker->Load("mmarker.xml", alvar::FILE_FORMAT_XML)) { - visualize = (visualize+1)%3; + cout << "Multi marker loaded" << endl; } - else if(key == 'l') + else + cout << "Cannot load multi marker" << endl; + } + else if (key == 'd') + { + detect_additional = !detect_additional; + if (detect_additional) + cout << "Unreadable marker detection enabled." << endl; + else + cout << "Unreadable marker detection disabled." << endl; + } + else + return key; + + return 0; +} + +int main(int argc, char* argv[]) +{ + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleMultiMarker" << std::endl; + std::cout << "=================" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to use the 'MultiMarker' class " + "to detect" + << std::endl; + std::cout << " preconfigured multi-marker setup (see ALVAR.pdf). A large " + "cube is drawn" + << std::endl; + std::cout << " over the detected multi-marker even when only some of the " + "markers are" + << std::endl; + std::cout << " visible." 
<< std::endl;
+    std::cout << std::endl;
+    std::cout << "Usage:" << std::endl;
+    std::cout << "  " << filename << " [device]" << std::endl;
+    std::cout << std::endl;
+    std::cout << "  device    integer selecting device from enumeration list "
+                 "(default 0)"
+              << std::endl;
+    std::cout << "            highgui capture devices are preferred"
+              << std::endl;
+    std::cout << std::endl;
+    std::cout << "Keyboard Shortcuts:" << std::endl;
+    std::cout << "  v: switch between three debug visualizations" << std::endl;
+    std::cout << "  l: load marker configuration from mmarker.xml" << std::endl;
+    std::cout << "  d: toggle the detection of non-readable markers"
+              << std::endl;
+    std::cout << "  q: quit" << std::endl;
+    std::cout << std::endl;
+
+    // Initialise CvTestbed
+    CvTestbed::Instance().SetVideoCallback(videocallback);
+    CvTestbed::Instance().SetKeyCallback(keycallback);
+
+    // Enumerate possible capture plugins
+    CaptureFactory::CapturePluginVector plugins =
+        CaptureFactory::instance()->enumeratePlugins();
+    if (plugins.size() < 1)
     {
-        if(multi_marker->Load("mmarker.xml", alvar::FILE_FORMAT_XML))
-        {
-            cout<<"Multi marker loaded"<enumerateDevices();
+    if (devices.size() < 1)
     {
-        detect_additional = !detect_additional;
-        if(detect_additional)
-            cout<<"Unreadable marker detection enabled."< 1)
+    {
+      selectedDevice = atoi(argv[1]);
+    }
+    if (selectedDevice >= (int)devices.size())
+    {
+      selectedDevice = defaultDevice(devices);
+    }
 
-int main(int argc, char *argv[])
-{
-    try {
-        // Output usage message
-        std::string filename(argv[0]);
-        filename = filename.substr(filename.find_last_of('\\') + 1);
-        std::cout << "SampleMultiMarker" << std::endl;
-        std::cout << "=================" << std::endl;
-        std::cout << std::endl;
-        std::cout << "Description:" << std::endl;
-        std::cout << "  This is an example of how to use the 'MultiMarker' class to detect" << std::endl;
-        std::cout << "  preconfigured multi-marker setup (see ALVAR.pdf). A large cube is drawn" << std::endl;
-        std::cout << "  over the detected multi-marker even when only some of the markers are" << std::endl;
-        std::cout << "  visible." << std::endl;
-        std::cout << std::endl;
-        std::cout << "Usage:" << std::endl;
-        std::cout << "  " << filename << " [device]" << std::endl;
-        std::cout << std::endl;
-        std::cout << "  device    integer selecting device from enumeration list (default 0)" << std::endl;
-        std::cout << "            highgui capture devices are prefered" << std::endl;
-        std::cout << std::endl;
-        std::cout << "Keyboard Shortcuts:" << std::endl;
-        std::cout << "  v: switch between three debug visualizations" << std::endl;
-        std::cout << "  l: load marker configuration from mmarker.txt" << std::endl;
-        std::cout << "  d: toggle the detection of non-readable markers" << std::endl;
-        std::cout << "  q: quit" << std::endl;
-        std::cout << std::endl;
-
-        // Initialise CvTestbed
-        CvTestbed::Instance().SetVideoCallback(videocallback);
-        CvTestbed::Instance().SetKeyCallback(keycallback);
-
-        // Enumerate possible capture plugins
-        CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins();
-        if (plugins.size() < 1) {
-            std::cout << "Could not find any capture plugins." << std::endl;
-            return 0;
-        }
-
-        // Display capture plugins
-        std::cout << "Available Plugins: ";
-        outputEnumeratedPlugins(plugins);
-        std::cout << std::endl;
-
-        // Enumerate possible capture devices
-        CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices();
-        if (devices.size() < 1) {
-            std::cout << "Could not find any capture devices." 
<< std::endl; - return 0; - } - - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); - - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } - - std::stringstream title; - title << "SampleMultiMarker (" << cap->captureDevice().captureType() << ")"; - - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } - - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." << std::endl; - } - - return 0; + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; + + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); + + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; + + cap->start(); + cap->setResolution(640, 480); + + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleMultiMarker (" << cap->captureDevice().captureType() + << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) 
+  {
+    std::cout << "Exception: unknown" << std::endl;
+  }
 }
diff --git a/ar_track_alvar/src/SampleMultiMarkerBundle.cpp b/ar_track_alvar/src/SampleMultiMarkerBundle.cpp
index 3500495..5229f09 100644
--- a/ar_track_alvar/src/SampleMultiMarkerBundle.cpp
+++ b/ar_track_alvar/src/SampleMultiMarkerBundle.cpp
@@ -7,306 +7,357 @@
 using namespace alvar;
 using namespace std;
 
-int visualize=1;
+int visualize = 1;
 const int nof_markers = 8;
 const double marker_size = 4;
-bool add_measurement=false;
+bool add_measurement = false;
 bool optimize = false;
 bool optimize_done = false;
 MarkerDetector<MarkerData> marker_detector;
 std::stringstream calibrationFilename;
-MultiMarkerInitializer *multi_marker_init=NULL;
-MultiMarkerBundle *multi_marker_bundle=NULL;
-double GetMultiMarkerPose(IplImage *image, Camera *cam, Pose &pose) {
-    static bool init=true;
-    if (init) {
-        cout<<"Using manual multimarker approach with MultiMarkerInitializer & MultiMarkerBundle."<<endl;
-        cout<<"Use 'p' for taking keyframes and 'o' for optimizing."<<endl;
-        init=false;
-        vector<int> id_vector;
-        for(int i = 0; i < nof_markers; ++i)
-            id_vector.push_back(i);
-        // We make the initialization for MultiMarkerBundle using MultiMarkerInitializer
-        // Each marker needs to be visible in at least two images and at most 64 image are used.
-        multi_marker_init = new MultiMarkerInitializer(id_vector, 2, 64);
-        pose.Reset();
-        multi_marker_init->PointCloudAdd(id_vector[0], marker_size, pose);
-        multi_marker_bundle = new MultiMarkerBundle(id_vector);
-    }
+MultiMarkerInitializer* multi_marker_init = NULL;
+MultiMarkerBundle* multi_marker_bundle = NULL;
+double GetMultiMarkerPose(IplImage* image, Camera* cam, Pose& pose)
+{
+  static bool init = true;
+  if (init)
+  {
+    cout << "Using manual multimarker approach with MultiMarkerInitializer & "
+            "MultiMarkerBundle."
+         << endl;
+    cout << "Use 'p' for taking keyframes and 'o' for optimizing." << endl;
+    init = false;
+    vector<int> id_vector;
+    for (int i = 0; i < nof_markers; ++i)
+      id_vector.push_back(i);
+    // We make the initialization for MultiMarkerBundle using
+    // MultiMarkerInitializer. Each marker needs to be visible in at least two
+    // images, and at most 64 images are used.
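+    // The two numeric arguments below are that same minimum visibility
+    // count (2) and the maximum number of keyframe images (64).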
+ multi_marker_init = new MultiMarkerInitializer(id_vector, 2, 64); + pose.Reset(); + multi_marker_init->PointCloudAdd(id_vector[0], marker_size, pose); + multi_marker_bundle = new MultiMarkerBundle(id_vector); + } - double error = -1; - if (!optimize_done) { - if (marker_detector.Detect(image, cam, true, (visualize == 1), 0.0)) { - if (visualize == 2) - error = multi_marker_init->Update(marker_detector.markers, cam, pose, image); - else - error = multi_marker_init->Update(marker_detector.markers, cam, pose); - } - } else { - if (marker_detector.Detect(image, cam, true, (visualize == 1), 0.0)) { - if (visualize == 2) - error = multi_marker_bundle->Update(marker_detector.markers, cam, pose, image); - else - error = multi_marker_bundle->Update(marker_detector.markers, cam, pose); - if ((multi_marker_bundle->SetTrackMarkers(marker_detector, cam, pose, image) > 0) && - (marker_detector.DetectAdditional(image, cam, (visualize == 3)) > 0)) - { - if (visualize == 3) - error = multi_marker_bundle->Update(marker_detector.markers, cam, pose, image); - else - error = multi_marker_bundle->Update(marker_detector.markers, cam, pose); - } - } + double error = -1; + if (!optimize_done) + { + if (marker_detector.Detect(image, cam, true, (visualize == 1), 0.0)) + { + if (visualize == 2) + error = multi_marker_init->Update(marker_detector.markers, cam, pose, + image); + else + error = multi_marker_init->Update(marker_detector.markers, cam, pose); + } + } + else + { + if (marker_detector.Detect(image, cam, true, (visualize == 1), 0.0)) + { + if (visualize == 2) + error = multi_marker_bundle->Update(marker_detector.markers, cam, pose, + image); + else + error = multi_marker_bundle->Update(marker_detector.markers, cam, pose); + if ((multi_marker_bundle->SetTrackMarkers(marker_detector, cam, pose, + image) > 0) && + (marker_detector.DetectAdditional(image, cam, (visualize == 3)) > 0)) + { + if (visualize == 3) + error = multi_marker_bundle->Update(marker_detector.markers, cam, + pose, image); + else + error = + multi_marker_bundle->Update(marker_detector.markers, cam, pose); + } } + } - if (add_measurement) { - if (marker_detector.markers->size() >= 2) { - cout<<"Adding measurement..."<MeasurementsAdd(marker_detector.markers); - add_measurement=false; - } + if (add_measurement) + { + if (marker_detector.markers->size() >= 2) + { + cout << "Adding measurement..." << endl; + multi_marker_init->MeasurementsAdd(marker_detector.markers); + add_measurement = false; } + } - if (optimize) { - cout<<"Initializing..."<Initialize(cam)) { - cout<<"Initialization failed, add some more measurements."<Reset(); - multi_marker_bundle->MeasurementsReset(); - // Copy all measurements into the bundle adjuster. - for (int i = 0; i < multi_marker_init->getMeasurementCount(); ++i) { - Pose pose; - multi_marker_init->getMeasurementPose(i, cam, pose); - const std::vector markers - = multi_marker_init->getMeasurementMarkers(i); - multi_marker_bundle->MeasurementsAdd(&markers, pose); - } - // Initialize the bundle adjuster with initial marker poses. - multi_marker_bundle->PointCloudCopy(multi_marker_init); - cout<<"Optimizing..."<Optimize(cam, 0.01, 20)) { - cout<<"Optimizing done"<Initialize(cam)) + { + cout << "Initialization failed, add some more measurements." << endl; + } + else + { + // Reset the bundle adjuster. + multi_marker_bundle->Reset(); + multi_marker_bundle->MeasurementsReset(); + // Copy all measurements into the bundle adjuster. 
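+      // Each stored measurement is one keyframe: the camera pose estimated
+      // for that frame together with the markers detected in it.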
+ for (int i = 0; i < multi_marker_init->getMeasurementCount(); ++i) + { + Pose pose; + multi_marker_init->getMeasurementPose(i, cam, pose); + const std::vector markers = + multi_marker_init->getMeasurementMarkers(i); + multi_marker_bundle->MeasurementsAdd(&markers, pose); + } + // Initialize the bundle adjuster with initial marker poses. + multi_marker_bundle->PointCloudCopy(multi_marker_init); + cout << "Optimizing..." << endl; + if (multi_marker_bundle->Optimize(cam, 0.01, 20)) + { + cout << "Optimizing done" << endl; + optimize_done = true; + } + else + { + cout << "Optimizing FAILED!" << endl; + } } - return error; + optimize = false; + } + return error; } -void videocallback(IplImage *image) +void videocallback(IplImage* image) { - static Camera cam; - static Pose pose; - bool flip_image = (image->origin?true:false); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; - } - - static bool init = true; - if (init) - { - init = false; + static Camera cam; + static Pose pose; + bool flip_image = (image->origin ? true : false); + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } - // Initialize camera - cout<<"Loading calibration: "<width, image->height)) - { - cout<<" [Ok]"<width, image->height); - cout<<" [Fail]"<width, + image->height)) + { + cout << " [Ok]" << endl; } - - // Manual approach (use 'p' for taking keyframes and 'o' for optimizing) - double error = GetMultiMarkerPose(image, &cam, pose); - - // Visualize a blue marker at the origo. - static Marker foo; - foo.SetMarkerSize(marker_size); - if ((error >= 0) && (error < 5)) + else { - foo.pose = pose; + cam.SetRes(image->width, image->height); + cout << " [Fail]" << endl; } - foo.Visualize(image, &cam, CV_RGB(0,0,255)); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; - } + marker_detector.SetMarkerSize(marker_size); + } + + // Manual approach (use 'p' for taking keyframes and 'o' for optimizing) + double error = GetMultiMarkerPose(image, &cam, pose); + + // Visualize a blue marker at the origo. 
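+  // The pose is refreshed only while the update error stays below 5;
+  // otherwise the marker keeps the last successfully estimated pose.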
+  static Marker foo;
+  foo.SetMarkerSize(marker_size);
+  if ((error >= 0) && (error < 5))
+  {
+    foo.pose = pose;
+  }
+  foo.Visualize(image, &cam, CV_RGB(0, 0, 255));
+
+  if (flip_image)
+  {
+    cvFlip(image);
+    image->origin = !image->origin;
+  }
 }
 
 int keycallback(int key)
 {
-    static bool fixed = false;
+  static bool fixed = false;
 
-    if(key == 'r')
+  if (key == 'r')
+  {
+    cout << "Resetting multi marker" << endl;
+    multi_marker_init->Reset();
+    multi_marker_init->MeasurementsReset();
+    multi_marker_bundle->Reset();
+    multi_marker_bundle->MeasurementsReset();
+    add_measurement = false;
+    optimize = false;
+    optimize_done = false;
+  }
+  else if (key == 'v')
+  {
+    visualize = (visualize + 1) % 3;
+  }
+  else if (key == 'l')
+  {
+    if (multi_marker_bundle->Load("mmarker.xml", FILE_FORMAT_XML))
     {
-        cout<<"Reseting multi marker"<Reset();
-        multi_marker_init->MeasurementsReset();
-        multi_marker_bundle->Reset();
-        multi_marker_bundle->MeasurementsReset();
-        add_measurement = false;
-        optimize = false;
-        optimize_done = false;
+      cout << "Multi marker loaded" << endl;
+      multi_marker_init->PointCloudCopy(multi_marker_bundle);
+      optimize_done = true;
     }
-    else if(key == 'v')
+    else
+      cout << "Cannot load multi marker" << endl;
+  }
+  else if (key == 's')
+  {
+    if (multi_marker_bundle->Save("mmarker.xml", FILE_FORMAT_XML))
+      cout << "Multi marker saved" << endl;
+    else
+      cout << "Cannot save multi marker" << endl;
+  }
+  else if (key == 'p')
+  {
+    add_measurement = true;
+  }
+  else if (key == 'o')
+  {
+    optimize = true;
+  }
+  else
+    return key;
+
+  return 0;
+}
+
+int main(int argc, char* argv[])
+{
+  try
+  {
+    // Output usage message
+    std::string filename(argv[0]);
+    filename = filename.substr(filename.find_last_of('\\') + 1);
+    std::cout << "SampleMultiMarkerBundle" << std::endl;
+    std::cout << "=======================" << std::endl;
+    std::cout << std::endl;
+    std::cout << "Description:" << std::endl;
+    std::cout << "  This is an example of how to use the 'MultiMarkerBundle' "
+                 "class to"
+              << std::endl;
+    std::cout << "  automatically deduce and optimize 'MultiMarker' setups."
+              << std::endl;
+    std::cout << std::endl;
+    std::cout << "Usage:" << std::endl;
+    std::cout << "  " << filename << " [device]" << std::endl;
+    std::cout << std::endl;
+    std::cout << "  device    integer selecting device from enumeration list "
+                 "(default 0)"
+              << std::endl;
+    std::cout << "            highgui capture devices are preferred"
+              << std::endl;
+    std::cout << std::endl;
+    std::cout << "Keyboard Shortcuts:" << std::endl;
+    std::cout << "  l: load marker configuration from mmarker.xml" << std::endl;
+    std::cout << "  s: save marker configuration to mmarker.xml" << std::endl;
+    std::cout << "  r: reset marker configuration" << std::endl;
+    std::cout << "  p: add measurement" << std::endl;
+    std::cout << "  o: optimize bundle" << std::endl;
+    std::cout << "  q: quit" << std::endl;
+    std::cout << std::endl;
+
+    // Initialise CvTestbed
+    CvTestbed::Instance().SetVideoCallback(videocallback);
+    CvTestbed::Instance().SetKeyCallback(keycallback);
+
+    // Enumerate possible capture plugins
+    CaptureFactory::CapturePluginVector plugins =
+        CaptureFactory::instance()->enumeratePlugins();
+    if (plugins.size() < 1)
     {
-        visualize = (visualize+1)%3;
+      std::cout << "Could not find any capture plugins." 
<< std::endl; + return 0; } - else if(key == 'l') + + // Display capture plugins + std::cout << "Available Plugins: "; + outputEnumeratedPlugins(plugins); + std::cout << std::endl; + + // Enumerate possible capture devices + CaptureFactory::CaptureDeviceVector devices = + CaptureFactory::instance()->enumerateDevices(); + if (devices.size() < 1) { - if(multi_marker_bundle->Load("mmarker.xml", FILE_FORMAT_XML)) - { - cout<<"Multi marker loaded"<PointCloudCopy(multi_marker_bundle); - optimize_done = true; - } - else - cout<<"Cannot load multi marker"< 1) { - if(multi_marker_bundle->Save("mmarker.xml", FILE_FORMAT_XML)) - cout<<"Multi marker saved"<= (int)devices.size()) { - add_measurement=true; + selectedDevice = defaultDevice(devices); } - else if(key == 'o') + + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; + + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); + + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) { - optimize=true; - } - else return key; + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - return 0; -} + cap->start(); + cap->setResolution(640, 480); -int main(int argc, char *argv[]) -{ - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SampleMultiMarkerBundle" << std::endl; - std::cout << "=======================" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This is an example of how to use the 'MultiMarkerBundle' class to" << std::endl; - std::cout << " automatically deduce and optimize 'MultiMarker' setups." << std::endl; - std::cout << std::endl; - std::cout << "Usage:" << std::endl; - std::cout << " " << filename << " [device]" << std::endl; - std::cout << std::endl; - std::cout << " device integer selecting device from enumeration list (default 0)" << std::endl; - std::cout << " highgui capture devices are prefered" << std::endl; - std::cout << std::endl; - std::cout << "Keyboard Shortcuts:" << std::endl; - std::cout << " l: load marker configuration from mmarker.txt" << std::endl; - std::cout << " s: save marker configuration to mmarker.txt" << std::endl; - std::cout << " r: reset marker configuration" << std::endl; - std::cout << " p: add measurement" << std::endl; - std::cout << " o: optimize bundle" << std::endl; - std::cout << " q: quit" << std::endl; - std::cout << std::endl; - - // Initialise CvTestbed - CvTestbed::Instance().SetVideoCallback(videocallback); - CvTestbed::Instance().SetKeyCallback(keycallback); - - // Enumerate possible capture plugins - CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins(); - if (plugins.size() < 1) { - std::cout << "Could not find any capture plugins." 
<< std::endl; - return 0; - } - - // Display capture plugins - std::cout << "Available Plugins: "; - outputEnumeratedPlugins(plugins); - std::cout << std::endl; - - // Enumerate possible capture devices - CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices(); - if (devices.size() < 1) { - std::cout << "Could not find any capture devices." << std::endl; - return 0; - } - - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); - - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } - - std::stringstream title; - title << "SampleMultiMarkerBundle (" << cap->captureDevice().captureType() << ")"; - - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } - - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." << std::endl; - } - - return 0; + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SampleMultiMarkerBundle (" << cap->captureDevice().captureType() + << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) 
+ { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleOptimization.cpp b/ar_track_alvar/src/SampleOptimization.cpp index 6e34300..4099784 100644 --- a/ar_track_alvar/src/SampleOptimization.cpp +++ b/ar_track_alvar/src/SampleOptimization.cpp @@ -8,136 +8,163 @@ using namespace std; using namespace alvar; -const int res=640; -const double poly_res=8.0; +const int res = 640; +const double poly_res = 8.0; -double random(int dist_type, double param1, double param2) { - static CvRNG rng=0; - if (rng == 0) rng = cvRNG(time(0)); - double m_data; - CvMat m = cvMat(1, 1, CV_64F, &m_data); - cvRandArr(&rng, &m, dist_type, cvScalar(param1), cvScalar(param2)); - return m_data; +double random(int dist_type, double param1, double param2) +{ + static CvRNG rng = 0; + if (rng == 0) + rng = cvRNG(time(0)); + double m_data; + CvMat m = cvMat(1, 1, CV_64F, &m_data); + cvRandArr(&rng, &m, dist_type, cvScalar(param1), cvScalar(param2)); + return m_data; } -double get_y(double x, double a, double b, double c, double d, double e) { - return (a*x*x*x*x + b*x*x*x + c*x*x + d*x + e); +double get_y(double x, double a, double b, double c, double d, double e) +{ + return (a * x * x * x * x + b * x * x * x + c * x * x + d * x + e); } // Just generate some random data that can be used as sensor input -bool get_measurement(double *x, double *y, double a, double b, double c, double d, double e) { - double xx = random(CV_RAND_UNI, -(poly_res/2), +(poly_res/2)); //(rand()*poly_res/RAND_MAX)-(poly_res/2); - double yy = get_y(xx, a, b, c, d, e); - double ry = random(CV_RAND_NORMAL, 0, poly_res/8); //(rand()*(poly_res/4)/RAND_MAX)-(poly_res/8); - yy += ry; - *x = xx; - *y = yy; - if (*y < -(poly_res/2)) return false; - if (*y >= (poly_res/2)) return false; - return true; +bool get_measurement(double* x, double* y, double a, double b, double c, + double d, double e) +{ + double xx = + random(CV_RAND_UNI, -(poly_res / 2), + +(poly_res / 2)); //(rand()*poly_res/RAND_MAX)-(poly_res/2); + double yy = get_y(xx, a, b, c, d, e); + double ry = + random(CV_RAND_NORMAL, 0, + poly_res / 8); //(rand()*(poly_res/4)/RAND_MAX)-(poly_res/8); + yy += ry; + *x = xx; + *y = yy; + if (*y < -(poly_res / 2)) + return false; + if (*y >= (poly_res / 2)) + return false; + return true; } -void Estimate(CvMat* state, CvMat *projection, void *param) { - double *measx=(double *)param; - int data_degree = state->rows-1; - double a = (data_degree >= 4? cvmGet(state, 4, 0) : 0); - double b = (data_degree >= 3? cvmGet(state, 3, 0) : 0); - double c = (data_degree >= 2? cvmGet(state, 2, 0) : 0); - double d = (data_degree >= 1? cvmGet(state, 1, 0) : 0); - double e = (data_degree >= 0? cvmGet(state, 0, 0) : 0); - for (int i=0; irows; i++) { - cvmSet(projection, i, 0, get_y(measx[i], a, b, c, d, e)); - } +void Estimate(CvMat* state, CvMat* projection, void* param) +{ + double* measx = (double*)param; + int data_degree = state->rows - 1; + double a = (data_degree >= 4 ? cvmGet(state, 4, 0) : 0); + double b = (data_degree >= 3 ? cvmGet(state, 3, 0) : 0); + double c = (data_degree >= 2 ? cvmGet(state, 2, 0) : 0); + double d = (data_degree >= 1 ? cvmGet(state, 1, 0) : 0); + double e = (data_degree >= 0 ? 
cvmGet(state, 0, 0) : 0); + for (int i = 0; i < projection->rows; i++) + { + cvmSet(projection, i, 0, get_y(measx[i], a, b, c, d, e)); + } } -int main(int argc, char *argv[]) +int main(int argc, char* argv[]) { - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SampleOptimization" << std::endl; - std::cout << "==================" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This is an example of how to use the 'Optimization' class. Random data" << std::endl; - std::cout << " is generated and approximated using curves of increasing degrees." << std::endl; - std::cout << std::endl; - std::cout << "Usage:" << std::endl; - std::cout << " " << filename << std::endl; - std::cout << std::endl; - std::cout << "Keyboard Shortcuts:" << std::endl; - std::cout << " any key: cycle through datasets" << std::endl; - std::cout << " q: quit" << std::endl; - std::cout << std::endl; + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SampleOptimization" << std::endl; + std::cout << "==================" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This is an example of how to use the 'Optimization' class. " + "Random data" + << std::endl; + std::cout << " is generated and approximated using curves of increasing " + "degrees." + << std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << std::endl; + std::cout << std::endl; + std::cout << "Keyboard Shortcuts:" << std::endl; + std::cout << " any key: cycle through datasets" << std::endl; + std::cout << " q: quit" << std::endl; + std::cout << std::endl; - // Processing loop - IplImage *img = cvCreateImage(cvSize(res,res), IPL_DEPTH_8U, 3); - cvNamedWindow("SampleOptimization"); - for (int data_degree=0; data_degree<5; data_degree++) { - double a = (data_degree >= 4? random(CV_RAND_UNI, -0.5, 0.5) : 0); - double b = (data_degree >= 3? random(CV_RAND_UNI, -0.5, 0.5) : 0); - double c = (data_degree >= 2? random(CV_RAND_UNI, -0.5, 0.5) : 0); - double d = (data_degree >= 1? random(CV_RAND_UNI, -0.5, 0.5) : 0); - double e = (data_degree >= 0? random(CV_RAND_UNI, -0.5, 0.5) : 0); - cvZero(img); - vector measvec; - for (int i=0; i<1000; i++) { - double x, y; - if (get_measurement(&x, &y, a, b, c, d, e)) { - measvec.push_back(cvPoint2D32f(x, y)); - x = (x*res/poly_res)+(res/2); - y = (y*res/poly_res)+(res/2); - cvCircle(img, cvPoint(int(x), int(y)), 1, CV_RGB(0,255,0)); - } - } - cvShowImage("SampleOptimization", img); - cvWaitKey(10); - double measx[1000]; - CvMat *meas = cvCreateMat(measvec.size(), 1, CV_64F); - for (size_t i=0; irows); - opt.Optimize(¶m, meas, 0.1, 100, Estimate, measx); - double a = (degree >= 4? cvmGet(¶m, 4, 0) : 0); - double b = (degree >= 3? cvmGet(¶m, 3, 0) : 0); - double c = (degree >= 2? cvmGet(¶m, 2, 0) : 0); - double d = (degree >= 1? cvmGet(¶m, 1, 0) : 0); - double e = (degree >= 0? cvmGet(¶m, 0, 0) : 0); - const int step=5; - for (int x2=step; x2= 4 ? random(CV_RAND_UNI, -0.5, 0.5) : 0); + double b = (data_degree >= 3 ? random(CV_RAND_UNI, -0.5, 0.5) : 0); + double c = (data_degree >= 2 ? random(CV_RAND_UNI, -0.5, 0.5) : 0); + double d = (data_degree >= 1 ? random(CV_RAND_UNI, -0.5, 0.5) : 0); + double e = (data_degree >= 0 ? 
random(CV_RAND_UNI, -0.5, 0.5) : 0);
+      cvZero(img);
+      vector<CvPoint2D32f> measvec;
+      for (int i = 0; i < 1000; i++)
+      {
+        double x, y;
+        if (get_measurement(&x, &y, a, b, c, d, e))
+        {
+          measvec.push_back(cvPoint2D32f(x, y));
+          x = (x * res / poly_res) + (res / 2);
+          y = (y * res / poly_res) + (res / 2);
+          cvCircle(img, cvPoint(int(x), int(y)), 1, CV_RGB(0, 255, 0));
         }
-        cvReleaseImage(&img);
-        return 0;
-    }
-    catch (const std::exception &e) {
-        std::cout << "Exception: " << e.what() << endl;
-    }
-    catch (...) {
-        std::cout << "Exception: unknown" << std::endl;
+      }
+      cvShowImage("SampleOptimization", img);
+      cvWaitKey(10);
+      double measx[1000];
+      CvMat* meas = cvCreateMat(measvec.size(), 1, CV_64F);
+      for (size_t i = 0; i < measvec.size(); i++)
+      {
+        measx[i] = measvec[i].x;
+        cvmSet(meas, i, 0, measvec[i].y);
+      }
+      for (int degree = 0; degree < 5; degree++)
+      {
+        double param_data[5] = { 0 };
+        CvMat param = cvMat(degree + 1, 1, CV_64F, param_data);
+        Optimization opt(param.rows, meas->rows);
+        opt.Optimize(&param, meas, 0.1, 100, Estimate, measx);
+        double a = (degree >= 4 ? cvmGet(&param, 4, 0) : 0);
+        double b = (degree >= 3 ? cvmGet(&param, 3, 0) : 0);
+        double c = (degree >= 2 ? cvmGet(&param, 2, 0) : 0);
+        double d = (degree >= 1 ? cvmGet(&param, 1, 0) : 0);
+        double e = (degree >= 0 ? cvmGet(&param, 0, 0) : 0);
+        const int step = 5;
+        for (int x2 = step; x2 < res; x2 += step)
+        {
+          int x1 = x2 - step;
+          double xx1 = (x1 * poly_res / res) - (poly_res / 2);
+          double xx2 = (x2 * poly_res / res) - (poly_res / 2);
+          double yy1 = get_y(xx1, a, b, c, d, e);
+          double yy2 = get_y(xx2, a, b, c, d, e);
+          int y1 = int((yy1 * res / poly_res) + (res / 2));
+          int y2 = int((yy2 * res / poly_res) + (res / 2));
+          cvLine(img, cvPoint(x1, y1), cvPoint(x2, y2),
+                 CV_RGB(degree * 50, 255 - (degree * 50), 255));
+        }
+        cvShowImage("SampleOptimization", img);
+        cvWaitKey(10);
+      }
+      cvReleaseMat(&meas);
+      cvShowImage("SampleOptimization", img);
+      int key = cvWaitKey(0);
+      if (key == 'q')
+      {
+        break;
+      }
     }
+    cvReleaseImage(&img);
+    return 0;
+  }
+  catch (const std::exception& e)
+  {
+    std::cout << "Exception: " << e.what() << endl;
+  }
+  catch (...) 
+ { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SamplePointcloud.cpp b/ar_track_alvar/src/SamplePointcloud.cpp index b971b66..f16ee6c 100644 --- a/ar_track_alvar/src/SamplePointcloud.cpp +++ b/ar_track_alvar/src/SamplePointcloud.cpp @@ -5,288 +5,351 @@ using namespace alvar; using namespace std; -const double marker_size=1; -bool init=true; -SimpleSfM *sfm; +const double marker_size = 1; +bool init = true; +SimpleSfM* sfm; std::stringstream calibrationFilename; // Own drawable 3D cross on features -struct OwnDrawable : public Drawable { - virtual void Draw() { - const double scale = 0.2; - glPushMatrix(); - glMultMatrixd(gl_mat); - glColor3d(color[0], color[1], color[2]); - glBegin(GL_LINES); - glVertex3f(0.0, 0.0, -scale); - glVertex3f(0.0, 0.0, scale); - glVertex3f(0.0, -scale, 0.0); - glVertex3f(0.0, scale, 0.0); - glVertex3f(-scale, 0.0, 0.0); - glVertex3f(scale, 0.0, 0.0); - glEnd(); - glPopMatrix(); - } +struct OwnDrawable : public Drawable +{ + virtual void Draw() + { + const double scale = 0.2; + glPushMatrix(); + glMultMatrixd(gl_mat); + glColor3d(color[0], color[1], color[2]); + glBegin(GL_LINES); + glVertex3f(0.0, 0.0, -scale); + glVertex3f(0.0, 0.0, scale); + glVertex3f(0.0, -scale, 0.0); + glVertex3f(0.0, scale, 0.0); + glVertex3f(-scale, 0.0, 0.0); + glVertex3f(scale, 0.0, 0.0); + glEnd(); + glPopMatrix(); + } }; Drawable d_marker; OwnDrawable d_points[1000]; int own_drawable_count; -bool reset=false; -void videocallback(IplImage *image) +bool reset = false; +void videocallback(IplImage* image) { - static IplImage *rgb = 0; - static IplImage* bg_image = 0; + static IplImage* rgb = 0; + static IplImage* bg_image = 0; - bool flip_image = (image->origin?true:false); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; - } + bool flip_image = (image->origin ? 
true : false); + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } - if (init) { - init = false; - sfm->Clear(); - cout<<"Loading calibration: "<GetCamera()->SetCalib(calibrationFilename.str().c_str(), image->width, image->height)) { - cout<<" [Ok]"<GetCamera()->SetRes(image->width, image->height); - cout<<" [Fail]"<GetCamera()->GetOpenglProjectionMatrix(p,image->width,image->height); - GlutViewer::SetGlProjectionMatrix(p); - d_marker.SetScale(marker_size*2); - rgb = CvTestbed::Instance().CreateImageWithProto("RGB", image, 0, 3); - CvTestbed::Instance().ToggleImageVisible(0, 0); - bg_image = CvTestbed::Instance().CreateImage("BG texture", cvSize(512,512),8, 3); - bg_image->origin = 0; + if (init) + { + init = false; + sfm->Clear(); + cout << "Loading calibration: " << calibrationFilename.str(); + if (sfm->GetCamera()->SetCalib(calibrationFilename.str().c_str(), + image->width, image->height)) + { + cout << " [Ok]" << endl; + } + else + { + sfm->GetCamera()->SetRes(image->width, image->height); + cout << " [Fail]" << endl; + } + double p[16]; + sfm->GetCamera()->GetOpenglProjectionMatrix(p, image->width, image->height); + GlutViewer::SetGlProjectionMatrix(p); + d_marker.SetScale(marker_size * 2); + rgb = CvTestbed::Instance().CreateImageWithProto("RGB", image, 0, 3); + CvTestbed::Instance().ToggleImageVisible(0, 0); + bg_image = + CvTestbed::Instance().CreateImage("BG texture", cvSize(512, 512), 8, 3); + bg_image->origin = 0; - sfm->SetScale(10); - if (sfm->AddMultiMarker("mmarker.xml", FILE_FORMAT_XML)) { - std::cout<<"Using MultiMarker defined in mmarker.xml."<AddMarker(0, marker_size*2, pose); - pose.SetTranslation(-marker_size*2.5, +marker_size*1.5, 0); - sfm->AddMarker(1, marker_size, pose); - pose.SetTranslation(+marker_size*2.5, +marker_size*1.5, 0); - sfm->AddMarker(2, marker_size, pose); - pose.SetTranslation(-marker_size*2.5, -marker_size*1.5, 0); - sfm->AddMarker(3, marker_size, pose); - pose.SetTranslation(+marker_size*2.5, -marker_size*1.5, 0); - sfm->AddMarker(4, marker_size, pose); - } - sfm->SetResetPoint(); + sfm->SetScale(10); + if (sfm->AddMultiMarker("mmarker.xml", FILE_FORMAT_XML)) + { + std::cout << "Using MultiMarker defined in mmarker.xml." << std::endl; } - if (reset) { - sfm->Reset(); - reset = false; + else + { + std::cout << "Couldn't load mmarker.xml. Using default " + "'SampleMultiMarker' setup." 
+ << std::endl; + Pose pose; + pose.Reset(); + sfm->AddMarker(0, marker_size * 2, pose); + pose.SetTranslation(-marker_size * 2.5, +marker_size * 1.5, 0); + sfm->AddMarker(1, marker_size, pose); + pose.SetTranslation(+marker_size * 2.5, +marker_size * 1.5, 0); + sfm->AddMarker(2, marker_size, pose); + pose.SetTranslation(-marker_size * 2.5, -marker_size * 1.5, 0); + sfm->AddMarker(3, marker_size, pose); + pose.SetTranslation(+marker_size * 2.5, -marker_size * 1.5, 0); + sfm->AddMarker(4, marker_size, pose); } + sfm->SetResetPoint(); + } + if (reset) + { + sfm->Reset(); + reset = false; + } - //if (sfm->UpdateRotationsOnly(image)) { - //if (sfm->UpdateTriangulateOnly(image)) { - if (sfm->Update(image, false, true, 7.f, 15.f)) { - // Draw the camera (The GlutViewer has little weirdness here...)q - Pose pose = *(sfm->GetPose()); - double gl[16]; - pose.GetMatrixGL(gl, true); - GlutViewer::SetGlModelviewMatrix(gl); - pose.Invert(); - pose.GetMatrixGL(d_marker.gl_mat, false); - GlutViewer::DrawableClear(); - GlutViewer::DrawableAdd(&d_marker); - own_drawable_count=0; + // if (sfm->UpdateRotationsOnly(image)) { + // if (sfm->UpdateTriangulateOnly(image)) { + if (sfm->Update(image, false, true, 7.f, 15.f)) + { + // Draw the camera (The GlutViewer has little weirdness here...)q + Pose pose = *(sfm->GetPose()); + double gl[16]; + pose.GetMatrixGL(gl, true); + GlutViewer::SetGlModelviewMatrix(gl); + pose.Invert(); + pose.GetMatrixGL(d_marker.gl_mat, false); + GlutViewer::DrawableClear(); + GlutViewer::DrawableAdd(&d_marker); + own_drawable_count = 0; - // Draw features - std::map::iterator iter; - iter = sfm->container.begin(); - for(;iter != sfm->container.end(); iter++) { - if (sfm->container_triangulated.find(iter->first) != sfm->container_triangulated.end()) continue; - if (iter->second.has_p3d) - { - if (own_drawable_count < 1000) { - memset(d_points[own_drawable_count].gl_mat, 0, 16*sizeof(double)); - d_points[own_drawable_count].gl_mat[0] = 1; - d_points[own_drawable_count].gl_mat[5] = 1; - d_points[own_drawable_count].gl_mat[10] = 1; - d_points[own_drawable_count].gl_mat[15] = 1; - d_points[own_drawable_count].gl_mat[12] = iter->second.p3d.x; - d_points[own_drawable_count].gl_mat[13] = iter->second.p3d.y; - d_points[own_drawable_count].gl_mat[14] = iter->second.p3d.z; - if (iter->second.type_id == 0) d_points[own_drawable_count].SetColor(1,0,0); - else d_points[own_drawable_count].SetColor(0,1,0); - GlutViewer::DrawableAdd(&(d_points[own_drawable_count])); - own_drawable_count++; - } - } + // Draw features + std::map::iterator iter; + iter = sfm->container.begin(); + for (; iter != sfm->container.end(); iter++) + { + if (sfm->container_triangulated.find(iter->first) != + sfm->container_triangulated.end()) + continue; + if (iter->second.has_p3d) + { + if (own_drawable_count < 1000) + { + memset(d_points[own_drawable_count].gl_mat, 0, 16 * sizeof(double)); + d_points[own_drawable_count].gl_mat[0] = 1; + d_points[own_drawable_count].gl_mat[5] = 1; + d_points[own_drawable_count].gl_mat[10] = 1; + d_points[own_drawable_count].gl_mat[15] = 1; + d_points[own_drawable_count].gl_mat[12] = iter->second.p3d.x; + d_points[own_drawable_count].gl_mat[13] = iter->second.p3d.y; + d_points[own_drawable_count].gl_mat[14] = iter->second.p3d.z; + if (iter->second.type_id == 0) + d_points[own_drawable_count].SetColor(1, 0, 0); + else + d_points[own_drawable_count].SetColor(0, 1, 0); + GlutViewer::DrawableAdd(&(d_points[own_drawable_count])); + own_drawable_count++; } + } + } - // Draw triangulated features - 
iter = sfm->container_triangulated.begin(); - for(;iter != sfm->container_triangulated.end(); iter++) { - if (iter->second.has_p3d) - { - if (own_drawable_count < 1000) { - memset(d_points[own_drawable_count].gl_mat, 0, 16*sizeof(double)); - d_points[own_drawable_count].gl_mat[0] = 1; - d_points[own_drawable_count].gl_mat[5] = 1; - d_points[own_drawable_count].gl_mat[10] = 1; - d_points[own_drawable_count].gl_mat[15] = 1; - d_points[own_drawable_count].gl_mat[12] = iter->second.p3d.x; - d_points[own_drawable_count].gl_mat[13] = iter->second.p3d.y; - d_points[own_drawable_count].gl_mat[14] = iter->second.p3d.z; - /*if (iter->second.type_id == 0) d_points[own_drawable_count].SetColor(1,0,1); - else*/ d_points[own_drawable_count].SetColor(0,0,1); - GlutViewer::DrawableAdd(&(d_points[own_drawable_count])); - own_drawable_count++; - } - } + // Draw triangulated features + iter = sfm->container_triangulated.begin(); + for (; iter != sfm->container_triangulated.end(); iter++) + { + if (iter->second.has_p3d) + { + if (own_drawable_count < 1000) + { + memset(d_points[own_drawable_count].gl_mat, 0, 16 * sizeof(double)); + d_points[own_drawable_count].gl_mat[0] = 1; + d_points[own_drawable_count].gl_mat[5] = 1; + d_points[own_drawable_count].gl_mat[10] = 1; + d_points[own_drawable_count].gl_mat[15] = 1; + d_points[own_drawable_count].gl_mat[12] = iter->second.p3d.x; + d_points[own_drawable_count].gl_mat[13] = iter->second.p3d.y; + d_points[own_drawable_count].gl_mat[14] = iter->second.p3d.z; + /*if (iter->second.type_id == 0) + d_points[own_drawable_count].SetColor(1,0,1); else*/ + d_points[own_drawable_count].SetColor(0, 0, 1); + GlutViewer::DrawableAdd(&(d_points[own_drawable_count])); + own_drawable_count++; } + } } - if (image->nChannels == 1) cvCvtColor(image, rgb, CV_GRAY2RGB); - else if (image->nChannels == 3) cvCopy(image, rgb); + } + if (image->nChannels == 1) + cvCvtColor(image, rgb, CV_GRAY2RGB); + else if (image->nChannels == 3) + cvCopy(image, rgb); - // Draw video on GlutViewer background - cvResize(rgb, bg_image); - GlutViewer::SetVideo(bg_image); + // Draw video on GlutViewer background + cvResize(rgb, bg_image); + GlutViewer::SetVideo(bg_image); - // Draw debug info to the rgb - sfm->Draw(rgb); + // Draw debug info to the rgb + sfm->Draw(rgb); - if (flip_image) { - cvFlip(image); - image->origin = !image->origin; - } + if (flip_image) + { + cvFlip(image); + image->origin = !image->origin; + } } - int keycallback(int key) { - if(key == 'r') - { - reset = true; - } - else return key; + if (key == 'r') + { + reset = true; + } + else + return key; - return 0; + return 0; } -int main(int argc, char *argv[]) +int main(int argc, char* argv[]) { - try { - // Output usage message - std::string filename(argv[0]); - filename = filename.substr(filename.find_last_of('\\') + 1); - std::cout << "SamplePointcloud" << std::endl; - std::cout << "================" << std::endl; - std::cout << std::endl; - std::cout << "Description:" << std::endl; - std::cout << " This example shows simple structure from motion approach that can be "<< std::endl; - std::cout << " used to track environment beyond an multimarker setup. To get this "<< std::endl; - std::cout << " example work properly be sure to calibrate your camera and tune it "<< std::endl; - std::cout << " to have fast framerate without motion blur. 
"<< std::endl; - std::cout << std::endl; - std::cout << " There are two possible approaches Update() and UpdateRotationsOnly()."<< std::endl; - std::cout << " By default the Update() is used but you can easily uncomment the "<< std::endl; - std::cout << " other one if needed."<< std::endl; - std::cout << std::endl; - std::cout << "Usage:" << std::endl; - std::cout << " " << filename << " [device]" << std::endl; - std::cout << std::endl; - std::cout << " device integer selecting device from enumeration list (default 0)" << std::endl; - std::cout << " highgui capture devices are prefered" << std::endl; - std::cout << std::endl; - std::cout << "Keyboard Shortcuts:" << std::endl; - std::cout << " r: reset" << std::endl; - std::cout << " q: quit" << std::endl; - std::cout << std::endl; + try + { + // Output usage message + std::string filename(argv[0]); + filename = filename.substr(filename.find_last_of('\\') + 1); + std::cout << "SamplePointcloud" << std::endl; + std::cout << "================" << std::endl; + std::cout << std::endl; + std::cout << "Description:" << std::endl; + std::cout << " This example shows simple structure from motion approach " + "that can be " + << std::endl; + std::cout << " used to track environment beyond an multimarker setup. To " + "get this " + << std::endl; + std::cout << " example work properly be sure to calibrate your camera and " + "tune it " + << std::endl; + std::cout << " to have fast framerate without motion blur. " + " " + << std::endl; + std::cout << std::endl; + std::cout << " There are two possible approaches Update() and " + "UpdateRotationsOnly()." + << std::endl; + std::cout << " By default the Update() is used but you can easily " + "uncomment the " + << std::endl; + std::cout << " other one if needed." << std::endl; + std::cout << std::endl; + std::cout << "Usage:" << std::endl; + std::cout << " " << filename << " [device]" << std::endl; + std::cout << std::endl; + std::cout << " device integer selecting device from enumeration list " + "(default 0)" + << std::endl; + std::cout << " highgui capture devices are prefered" + << std::endl; + std::cout << std::endl; + std::cout << "Keyboard Shortcuts:" << std::endl; + std::cout << " r: reset" << std::endl; + std::cout << " q: quit" << std::endl; + std::cout << std::endl; - // Initialise GlutViewer and CvTestbed - GlutViewer::Start(argc, argv, 640, 480, 100); - CvTestbed::Instance().SetKeyCallback(keycallback); - CvTestbed::Instance().SetVideoCallback(videocallback); - sfm = new SimpleSfM(); + // Initialise GlutViewer and CvTestbed + GlutViewer::Start(argc, argv, 640, 480, 100); + CvTestbed::Instance().SetKeyCallback(keycallback); + CvTestbed::Instance().SetVideoCallback(videocallback); + sfm = new SimpleSfM(); - // Enumerate possible capture plugins - CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins(); - if (plugins.size() < 1) { - std::cout << "Could not find any capture plugins." << std::endl; - return 0; - } + // Enumerate possible capture plugins + CaptureFactory::CapturePluginVector plugins = + CaptureFactory::instance()->enumeratePlugins(); + if (plugins.size() < 1) + { + std::cout << "Could not find any capture plugins." 
<< std::endl; + return 0; + } - // Display capture plugins - std::cout << "Available Plugins: "; - outputEnumeratedPlugins(plugins); - std::cout << std::endl; + // Display capture plugins + std::cout << "Available Plugins: "; + outputEnumeratedPlugins(plugins); + std::cout << std::endl; - // Enumerate possible capture devices - CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices(); - if (devices.size() < 1) { - std::cout << "Could not find any capture devices." << std::endl; - return 0; - } + // Enumerate possible capture devices + CaptureFactory::CaptureDeviceVector devices = + CaptureFactory::instance()->enumerateDevices(); + if (devices.size() < 1) + { + std::cout << "Could not find any capture devices." << std::endl; + return 0; + } - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); + // Check command line argument for which device to use + int selectedDevice = defaultDevice(devices); + if (argc > 1) + { + selectedDevice = atoi(argv[1]); + } + if (selectedDevice >= (int)devices.size()) + { + selectedDevice = defaultDevice(devices); + } - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; - std::stringstream title; - title << "SamplePointcloud (" << cap->captureDevice().captureType() << ")"; + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } + cap->start(); + cap->setResolution(640, 480); - cap->stop(); - delete cap; cap = NULL; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." 
<< std::endl; - } - delete sfm; sfm = NULL; + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } + + std::stringstream title; + title << "SamplePointcloud (" << cap->captureDevice().captureType() + << ")"; + + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - return 0; + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } + + cap->stop(); + delete cap; + cap = NULL; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + delete sfm; + sfm = NULL; + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SampleTrack.cpp b/ar_track_alvar/src/SampleTrack.cpp index e85f03b..f5b1db8 100644 --- a/ar_track_alvar/src/SampleTrack.cpp +++ b/ar_track_alvar/src/SampleTrack.cpp @@ -12,253 +12,304 @@ bool reset = true; CvFont font; std::stringstream calibrationFilename; -void track_none(IplImage *image, IplImage *img_gray) { +void track_none(IplImage* image, IplImage* img_gray) +{ } -void track_psa(IplImage *image, IplImage *img_gray) { - static TrackerPsa tracker_psa; - static double x, y; - if (reset) { - reset = false; - x = img_gray->width / 2; - y = img_gray->height / 2; - tracker_psa.Track(img_gray); // To reset tracker call it twice - } - tracker_psa.Track(img_gray); - tracker_psa.Compensate(&x, &y); - cvCircle(image, cvPoint(int(x), int(y)), 10, CV_RGB(255,0,0)); +void track_psa(IplImage* image, IplImage* img_gray) +{ + static TrackerPsa tracker_psa; + static double x, y; + if (reset) + { + reset = false; + x = img_gray->width / 2; + y = img_gray->height / 2; + tracker_psa.Track(img_gray); // To reset tracker call it twice + } + tracker_psa.Track(img_gray); + tracker_psa.Compensate(&x, &y); + cvCircle(image, cvPoint(int(x), int(y)), 10, CV_RGB(255, 0, 0)); } -void track_psa_rot(IplImage *image, IplImage *img_gray) { - static TrackerPsaRot tracker_psa_rot; - static double x, y, r; - if (reset) { - reset = false; - x = img_gray->width / 2; - y = img_gray->height / 2; - r = 0; - tracker_psa_rot.Track(img_gray); // To reset tracker call it twice - } - tracker_psa_rot.Track(img_gray); - tracker_psa_rot.Compensate(&x, &y); - r += tracker_psa_rot.rotd; - cvCircle(image, cvPoint(int(x), int(y)), 15, CV_RGB(255,0,0)); - double r_rad = r*3.1415926535/180; - cvLine(image, cvPoint(int(x), int(y)), cvPoint(int(x-sin(r_rad)*15), int(y+cos(r_rad)*15)), CV_RGB(255,0,0)); +void track_psa_rot(IplImage* image, IplImage* img_gray) +{ + static TrackerPsaRot tracker_psa_rot; + static double x, y, r; + if (reset) + { + reset = false; + x = img_gray->width / 2; + y = img_gray->height / 2; + r = 0; + tracker_psa_rot.Track(img_gray); // To reset tracker call it twice + } + tracker_psa_rot.Track(img_gray); + tracker_psa_rot.Compensate(&x, &y); + r += tracker_psa_rot.rotd; + cvCircle(image, cvPoint(int(x), int(y)), 15, CV_RGB(255, 0, 0)); + double r_rad = r * 3.1415926535 / 180; + cvLine(image, cvPoint(int(x), int(y)), + cvPoint(int(x - sin(r_rad) * 15), int(y + cos(r_rad) * 15)), + CV_RGB(255, 0, 0)); } -void track_stat(IplImage *image, 
IplImage *img_gray) { - static TrackerStat tracker_stat; - static double x, y; - if (reset) { - reset = false; - x = img_gray->width / 2; - y = img_gray->height / 2; - tracker_stat.Track(img_gray); // To reset tracker call it twice - } - tracker_stat.Track(img_gray); - tracker_stat.Compensate(&x, &y); - cvCircle(image, cvPoint(int(x), int(y)), 10, CV_RGB(0,255,0)); +void track_stat(IplImage* image, IplImage* img_gray) +{ + static TrackerStat tracker_stat; + static double x, y; + if (reset) + { + reset = false; + x = img_gray->width / 2; + y = img_gray->height / 2; + tracker_stat.Track(img_gray); // To reset tracker call it twice + } + tracker_stat.Track(img_gray); + tracker_stat.Compensate(&x, &y); + cvCircle(image, cvPoint(int(x), int(y)), 10, CV_RGB(0, 255, 0)); } -void track_stat_rot(IplImage *image, IplImage *img_gray) { - static TrackerStatRot tracker_stat_rot; - static double x, y, r; - if (reset) { - reset = false; - x = img_gray->width / 2; - y = img_gray->height / 2; - r = 0; - tracker_stat_rot.Track(img_gray); // To reset tracker call it twice - } - tracker_stat_rot.Track(img_gray); - tracker_stat_rot.Compensate(&x, &y); - r += tracker_stat_rot.rotd; - cvCircle(image, cvPoint(int(x), int(y)), 15, CV_RGB(0,255,0)); - double r_rad = r*3.1415926535/180; - cvLine(image, cvPoint(int(x), int(y)), cvPoint(int(x-sin(r_rad)*15), int(y+cos(r_rad)*15)), CV_RGB(0,255,0)); +void track_stat_rot(IplImage* image, IplImage* img_gray) +{ + static TrackerStatRot tracker_stat_rot; + static double x, y, r; + if (reset) + { + reset = false; + x = img_gray->width / 2; + y = img_gray->height / 2; + r = 0; + tracker_stat_rot.Track(img_gray); // To reset tracker call it twice + } + tracker_stat_rot.Track(img_gray); + tracker_stat_rot.Compensate(&x, &y); + r += tracker_stat_rot.rotd; + cvCircle(image, cvPoint(int(x), int(y)), 15, CV_RGB(0, 255, 0)); + double r_rad = r * 3.1415926535 / 180; + cvLine(image, cvPoint(int(x), int(y)), + cvPoint(int(x - sin(r_rad) * 15), int(y + cos(r_rad) * 15)), + CV_RGB(0, 255, 0)); } -void track_features(IplImage *image, IplImage *img_gray) { - static TrackerFeatures tracker_features(200, 190, 0.01, 0, 4, 6); - if (reset) { - reset = false; - tracker_features.Reset(); - } - tracker_features.Purge(); - tracker_features.Track(img_gray); - for (int i=0; iwidth, image->height)) { - cout<<" [Ok]"<width, image->height); - cout<<" [Fail]"<width, + image->height)) + { + cout << " [Ok]" << endl; } - if (image->nChannels == 1) cvCopy(image, img_gray); - else cvCvtColor(image, img_gray, CV_RGB2GRAY); + else + { + cam.SetRes(image->width, image->height); + cout << " [Fail]" << endl; + } + } + if (image->nChannels == 1) + cvCopy(image, img_gray); + else + cvCvtColor(image, img_gray, CV_RGB2GRAY); - trackers[tracker](image, img_gray); - cvPutText(image, tracker_names[tracker], cvPoint(3, image->height - 20), &font, CV_RGB(255, 255, 255)); + trackers[tracker](image, img_gray); + cvPutText(image, tracker_names[tracker], cvPoint(3, image->height - 20), + &font, CV_RGB(255, 255, 255)); } -int keycallback(int key) { - if ((key == 'r') || (key == 't')) { - reset = true; - return 0; - } - else if ((key == 'n') || (key == ' ')){ - tracker = ((tracker+1)%nof_trackers); - reset = true; - return 0; - } - return key; +int keycallback(int key) +{ + if ((key == 'r') || (key == 't')) + { + reset = true; + return 0; + } + else if ((key == 'n') || (key == ' ')) + { + tracker = ((tracker + 1) % nof_trackers); + reset = true; + return 0; + } + return key; } -int main(int argc, char *argv[]) +int main(int 
argc, char* argv[])
 {
-    try {
-        // Output usage message
-        std::string filename(argv[0]);
-        filename = filename.substr(filename.find_last_of('\\') + 1);
-        std::cout << "SampleTrack" << std::endl;
-        std::cout << "===========" << std::endl;
-        std::cout << std::endl;
-        std::cout << "Description:" << std::endl;
-        std::cout << "  This is an example of how to use the 'TrackerPsa', 'TrackerPsaRot'," << std::endl;
-        std::cout << "  'TrackerFeatures', 'TrackerStat' and 'TrackerStatRot' classes to" << std::endl;
-        std::cout << "  track the optical flow of the video." << std::endl;
-        std::cout << std::endl;
-        std::cout << "Usage:" << std::endl;
-        std::cout << "  " << filename << " [device]" << std::endl;
-        std::cout << std::endl;
-        std::cout << "    device    integer selecting device from enumeration list (default 0)" << std::endl;
-        std::cout << "              highgui capture devices are prefered" << std::endl;
-        std::cout << std::endl;
-        std::cout << "Keyboard Shortcuts:" << std::endl;
-        std::cout << "      r,t: reset tracker" << std::endl;
-        std::cout << "  n,space: cycle through tracking algorithms" << std::endl;
-        std::cout << "        q: quit" << std::endl;
-        std::cout << std::endl;
+  try
+  {
+    // Output usage message
+    std::string filename(argv[0]);
+    filename = filename.substr(filename.find_last_of('\\') + 1);
+    std::cout << "SampleTrack" << std::endl;
+    std::cout << "===========" << std::endl;
+    std::cout << std::endl;
+    std::cout << "Description:" << std::endl;
+    std::cout << "  This is an example of how to use the 'TrackerPsa', "
+                 "'TrackerPsaRot',"
+              << std::endl;
+    std::cout << "  'TrackerFeatures', 'TrackerStat' and 'TrackerStatRot' "
+                 "classes to"
+              << std::endl;
+    std::cout << "  track the optical flow of the video." << std::endl;
+    std::cout << std::endl;
+    std::cout << "Usage:" << std::endl;
+    std::cout << "  " << filename << " [device]" << std::endl;
+    std::cout << std::endl;
+    std::cout << "    device    integer selecting device from enumeration list "
+                 "(default 0)"
+              << std::endl;
+    std::cout << "              highgui capture devices are preferred"
+              << std::endl;
+    std::cout << std::endl;
+    std::cout << "Keyboard Shortcuts:" << std::endl;
+    std::cout << "      r,t: reset tracker" << std::endl;
+    std::cout << "  n,space: cycle through tracking algorithms" << std::endl;
+    std::cout << "        q: quit" << std::endl;
+    std::cout << std::endl;
+
+    // Initialize font
+    cvInitFont(&font, CV_FONT_HERSHEY_PLAIN, 1.0, 1.0);
+
+    // Initialise CvTestbed
+    CvTestbed::Instance().SetVideoCallback(videocallback);
+    CvTestbed::Instance().SetKeyCallback(keycallback);
 
-        // Initialize font
-        cvInitFont(&font, CV_FONT_HERSHEY_PLAIN, 1.0, 1.0);
+    // Enumerate possible capture plugins
+    CaptureFactory::CapturePluginVector plugins =
+        CaptureFactory::instance()->enumeratePlugins();
+    if (plugins.size() < 1)
+    {
+      std::cout << "Could not find any capture plugins." << std::endl;
+      return 0;
+    }
+
+    // Display capture plugins
+    std::cout << "Available Plugins: ";
+    outputEnumeratedPlugins(plugins);
+    std::cout << std::endl;
 
-        // Initialise CvTestbed
-        CvTestbed::Instance().SetVideoCallback(videocallback);
-        CvTestbed::Instance().SetKeyCallback(keycallback);
+    // Enumerate possible capture devices
+    CaptureFactory::CaptureDeviceVector devices =
+        CaptureFactory::instance()->enumerateDevices();
+    if (devices.size() < 1)
+    {
+      std::cout << "Could not find any capture devices." 
<< std::endl; + return 0; + } - // Enumerate possible capture plugins - CaptureFactory::CapturePluginVector plugins = CaptureFactory::instance()->enumeratePlugins(); - if (plugins.size() < 1) { - std::cout << "Could not find any capture plugins." << std::endl; - return 0; - } + // Check command line argument for which device to use + int selectedDevice = defaultDevice(devices); + if (argc > 1) + { + selectedDevice = atoi(argv[1]); + } + if (selectedDevice >= (int)devices.size()) + { + selectedDevice = defaultDevice(devices); + } - // Display capture plugins - std::cout << "Available Plugins: "; - outputEnumeratedPlugins(plugins); - std::cout << std::endl; + // Display capture devices + std::cout << "Enumerated Capture Devices:" << std::endl; + outputEnumeratedDevices(devices, selectedDevice); + std::cout << std::endl; - // Enumerate possible capture devices - CaptureFactory::CaptureDeviceVector devices = CaptureFactory::instance()->enumerateDevices(); - if (devices.size() < 1) { - std::cout << "Could not find any capture devices." << std::endl; - return 0; - } + // Create capture object from camera + Capture* cap = + CaptureFactory::instance()->createCapture(devices[selectedDevice]); + std::string uniqueName = devices[selectedDevice].uniqueName(); - // Check command line argument for which device to use - int selectedDevice = defaultDevice(devices); - if (argc > 1) { - selectedDevice = atoi(argv[1]); - } - if (selectedDevice >= (int)devices.size()) { - selectedDevice = defaultDevice(devices); - } - - // Display capture devices - std::cout << "Enumerated Capture Devices:" << std::endl; - outputEnumeratedDevices(devices, selectedDevice); - std::cout << std::endl; - - // Create capture object from camera - Capture *cap = CaptureFactory::instance()->createCapture(devices[selectedDevice]); - std::string uniqueName = devices[selectedDevice].uniqueName(); + // Handle capture lifecycle and start video capture + // Note that loadSettings/saveSettings are not supported by all plugins + if (cap) + { + std::stringstream settingsFilename; + settingsFilename << "camera_settings_" << uniqueName << ".xml"; + calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - // Handle capture lifecycle and start video capture - // Note that loadSettings/saveSettings are not supported by all plugins - if (cap) { - std::stringstream settingsFilename; - settingsFilename << "camera_settings_" << uniqueName << ".xml"; - calibrationFilename << "camera_calibration_" << uniqueName << ".xml"; - - cap->start(); - cap->setResolution(640, 480); - - if (cap->loadSettings(settingsFilename.str())) { - std::cout << "Loading settings: " << settingsFilename.str() << std::endl; - } + cap->start(); + cap->setResolution(640, 480); - std::stringstream title; - title << "SampleTrack (" << cap->captureDevice().captureType() << ")"; + if (cap->loadSettings(settingsFilename.str())) + { + std::cout << "Loading settings: " << settingsFilename.str() + << std::endl; + } - CvTestbed::Instance().StartVideo(cap, title.str().c_str()); + std::stringstream title; + title << "SampleTrack (" << cap->captureDevice().captureType() << ")"; - if (cap->saveSettings(settingsFilename.str())) { - std::cout << "Saving settings: " << settingsFilename.str() << std::endl; - } + CvTestbed::Instance().StartVideo(cap, title.str().c_str()); - cap->stop(); - delete cap; - } - else if (CvTestbed::Instance().StartVideo(0, argv[0])) { - } - else { - std::cout << "Could not initialize the selected capture backend." 
<< std::endl; - } + if (cap->saveSettings(settingsFilename.str())) + { + std::cout << "Saving settings: " << settingsFilename.str() << std::endl; + } - return 0; + cap->stop(); + delete cap; } - catch (const std::exception &e) { - std::cout << "Exception: " << e.what() << endl; + else if (CvTestbed::Instance().StartVideo(0, argv[0])) + { } - catch (...) { - std::cout << "Exception: unknown" << std::endl; + else + { + std::cout << "Could not initialize the selected capture backend." + << std::endl; } + + return 0; + } + catch (const std::exception& e) + { + std::cout << "Exception: " << e.what() << endl; + } + catch (...) + { + std::cout << "Exception: unknown" << std::endl; + } } diff --git a/ar_track_alvar/src/SfM.cpp b/ar_track_alvar/src/SfM.cpp index efd36e3..728304c 100644 --- a/ar_track_alvar/src/SfM.cpp +++ b/ar_track_alvar/src/SfM.cpp @@ -23,485 +23,651 @@ #include "SfM.h" -namespace alvar { - +namespace alvar +{ class CameraMoves { - bool is_initialized; - double prev[3]; + bool is_initialized; + double prev[3]; public: - - CameraMoves() - { - is_initialized = false; - memset(prev, 0, 3*sizeof(double)); - } - - bool UpdateDistance(Pose* pose, double limit=10) - { - double trad[3]; CvMat tra = cvMat(3, 1, CV_64F, trad); - Pose p = *pose; - p.Invert(); - p.GetTranslation(&tra); - if(is_initialized == false) - { - memcpy(prev, trad, 3*sizeof(double)); - is_initialized = true; - return false; - } - double dist = (prev[0]-trad[0])*(prev[0]-trad[0]) + - (prev[1]-trad[1])*(prev[1]-trad[1]) + - (prev[2]-trad[2])*(prev[2]-trad[2]); - if(dist > limit) - { - memcpy(prev, trad, 3*sizeof(double)); - return true; - } - return false; - } + CameraMoves() + { + is_initialized = false; + memset(prev, 0, 3 * sizeof(double)); + } + + bool UpdateDistance(Pose* pose, double limit = 10) + { + double trad[3]; + CvMat tra = cvMat(3, 1, CV_64F, trad); + Pose p = *pose; + p.Invert(); + p.GetTranslation(&tra); + if (is_initialized == false) + { + memcpy(prev, trad, 3 * sizeof(double)); + is_initialized = true; + return false; + } + double dist = (prev[0] - trad[0]) * (prev[0] - trad[0]) + + (prev[1] - trad[1]) * (prev[1] - trad[1]) + + (prev[2] - trad[2]) * (prev[2] - trad[2]); + if (dist > limit) + { + memcpy(prev, trad, 3 * sizeof(double)); + return true; + } + return false; + } }; CameraMoves moving; -void SimpleSfM::Reset(bool reset_also_triangulated) { - pose_ok = false; - container = container_reset_point; - if (reset_also_triangulated) { - container_triangulated = container_triangulated_reset_point; - } - pose_difference.Reset(); - pose_difference.Mirror(false, true, true); +void SimpleSfM::Reset(bool reset_also_triangulated) +{ + pose_ok = false; + container = container_reset_point; + if (reset_also_triangulated) + { + container_triangulated = container_triangulated_reset_point; + } + pose_difference.Reset(); + pose_difference.Mirror(false, true, true); } -void SimpleSfM::SetResetPoint() { - container_reset_point = container; - container_triangulated_reset_point = container_triangulated; +void SimpleSfM::SetResetPoint() +{ + container_reset_point = container; + container_triangulated_reset_point = container_triangulated; } -void SimpleSfM::Clear() { - container.clear(); +void SimpleSfM::Clear() +{ + container.clear(); } -CameraEC *SimpleSfM::GetCamera() { return &cam; } - -Pose *SimpleSfM::GetPose() { return &pose; } +CameraEC* SimpleSfM::GetCamera() +{ + return &cam; +} -bool SimpleSfM::AddMultiMarker(const char *fname, FILE_FORMAT format/* = FILE_FORMAT_XML*/) { - MultiMarkerEC mm; - return 
mm.Load(container, fname, format, 0, 0,1023); +Pose* SimpleSfM::GetPose() +{ + return &pose; } -bool SimpleSfM::AddMultiMarker(MultiMarkerEC *mm) { - return mm->MarkersToEC(container, 0, 0, 1023); +bool SimpleSfM::AddMultiMarker(const char* fname, + FILE_FORMAT format /* = FILE_FORMAT_XML*/) +{ + MultiMarkerEC mm; + return mm.Load(container, fname, format, 0, 0, 1023); } -void SimpleSfM::AddMarker(int marker_id, double edge_length, Pose &pose) { - marker_detector.MarkerToEC(container, marker_id, edge_length, pose, 0, 0, 1023); +bool SimpleSfM::AddMultiMarker(MultiMarkerEC* mm) +{ + return mm->MarkersToEC(container, 0, 0, 1023); } -float PointVectorFromCamera(CvPoint3D32f p3d, CvPoint3D32f &p3d_vec, Pose *camera_pose) { - double pd[16], v[4] = {0,0,0,1}; - CvMat Pi = cvMat(4, 4, CV_64F, pd); - CvMat V = cvMat(4, 1, CV_64F, v); - - // Camera location in marker coordinates - camera_pose->GetMatrix(&Pi); - cvInv(&Pi, &Pi); - cvMatMul(&Pi, &V, &V); - v[0] /= v[3]; - v[1] /= v[3]; - v[2] /= v[3]; - - // Vector from camera-point to the 3D-point in marker coordinates - p3d_vec.x = float(p3d.x - v[0]); - p3d_vec.y = float(p3d.y - v[1]); - p3d_vec.z = float(p3d.z - v[2]); - - return std::sqrt(p3d_vec.x*p3d_vec.x + p3d_vec.y*p3d_vec.y + p3d_vec.z*p3d_vec.z); +void SimpleSfM::AddMarker(int marker_id, double edge_length, Pose& pose) +{ + marker_detector.MarkerToEC(container, marker_id, edge_length, pose, 0, 0, + 1023); } -void CreateShadowPoint(CvPoint3D32f &p3d_sh, CvPoint3D32f p3d, CameraEC *cam, Pose *camera_pose, float parallax_length, float triangulate_angle) { - // Vector from camera-point to the 3D-point in marker coordinates - float l = PointVectorFromCamera(p3d, p3d_sh, camera_pose); - - // Figure out a suitable matching shadow point distance - CvPoint2D32f p2d; - CvPoint3D32f p3d2; - cam->ProjectPoint(p3d, camera_pose, p2d); - p2d.x += parallax_length; - cam->Get3dOnDepth(camera_pose, p2d, l, p3d2); - p3d2.x -= p3d.x; - p3d2.y -= p3d.y; - p3d2.z -= p3d.z; - float pl = std::sqrt(p3d2.x*p3d2.x + p3d2.y*p3d2.y + p3d2.z*p3d2.z); - float shadow_point_dist = float(pl / std::abs(tan(triangulate_angle*3.1415926535/180.))); // How far we need the shadow point to have the parallax limit match - - // Create the shadow point - p3d_sh.x /= l; - p3d_sh.y /= l; - p3d_sh.z /= l; - p3d_sh.x *= shadow_point_dist; - p3d_sh.y *= shadow_point_dist; - p3d_sh.z *= shadow_point_dist; - p3d_sh.x += p3d.x; - p3d_sh.y += p3d.y; - p3d_sh.z += p3d.z; - - //std::cout<<"p : "<GetMatrix(&Pi); + cvInv(&Pi, &Pi); + cvMatMul(&Pi, &V, &V); + v[0] /= v[3]; + v[1] /= v[3]; + v[2] /= v[3]; + + // Vector from camera-point to the 3D-point in marker coordinates + p3d_vec.x = float(p3d.x - v[0]); + p3d_vec.y = float(p3d.y - v[1]); + p3d_vec.z = float(p3d.z - v[2]); + + return std::sqrt(p3d_vec.x * p3d_vec.x + p3d_vec.y * p3d_vec.y + + p3d_vec.z * p3d_vec.z); } -bool SimpleSfM::Update(IplImage *image, bool assume_plane, bool triangulate, float reproj_err_limit, float triangulate_angle) { - const int min_triangulate=50, max_triangulate=150; - const float plane_dist_limit = 100.f; - const bool remember_3d_points = true; - const float parallax_length = 10.f; - const float parallax_length_sq = parallax_length*parallax_length; - float reproj_err_limit_sq = reproj_err_limit*reproj_err_limit; - std::map::iterator iter; - std::map::iterator iter_end = container.end(); - - // #1. 
Detect marker corners and track features - marker_detector.Detect(image, &cam, container, 0, 0, 1023, true, false); - tf.Purge(); - tf.Track(image, 0, container, 1); // Track without adding features - //tf.Track(image, 0, container, 1, 1024, 65535); // Track with adding features - tf.EraseNonTracked(container, 1); - - // #2. Update pose - if (!pose_ok) pose_ok = cam.CalcExteriorOrientation(container, &pose); - pose_ok = cam.UpdatePose(container, &pose); - if (!pose_ok) pose_ok = cam.UpdatePose(container, &pose, 0); // Only markers - if (!pose_ok) return false; - - // #3. Reproject features and their shadows - double err_markers = cam.Reproject(container, &pose, 0, true, false, 100); - if (err_markers > reproj_err_limit*2) { Reset(false); return false;} - double err_features = cam.Reproject(container, &pose, 1, false, false, 100); - if ((err_markers == 0) && (err_features > reproj_err_limit*2)) { - // Error is so large, that we just are satisfied we have some pose - // Don't triangulate and remove points based on this result - return true; - } - - // #4. Add previously triangulated features to container if they are visible... - if (remember_3d_points) { - IplImage *mask = tf.NewFeatureMask(); - iter = container_triangulated.begin(); - while(iter != container_triangulated.end()) { - if (container.find(iter->first) == container.end()) { - Camera *c = &cam; - c->ProjectPoint(iter->second.p3d, &pose, iter->second.projected_p2d); - if ((iter->second.projected_p2d.x >= 0) && - (iter->second.projected_p2d.y >= 0) && - (iter->second.projected_p2d.x < image->width) && - (iter->second.projected_p2d.y < image->height)) - { - CvScalar s = cvGet2D(mask, int(iter->second.projected_p2d.y), int(iter->second.projected_p2d.x)); - if (s.val[0] == 0) { - container[iter->first] = iter->second; - container[iter->first].estimation_type = 3; - container[iter->first].p2d = container[iter->first].projected_p2d; - } - } - } - iter++; - } - } - - // #5. Add other features to container - // Assume them initially on marker plane if (assume_plane == true) - tf.AddFeatures(container, 1, 1024, 0xffffff); //65535); - if (assume_plane) { - for (iter = container.begin(); iter != iter_end; iter++) { - Feature &f = iter->second; - //if (f.type_id == 0) continue; - if (!f.has_p3d && f.estimation_type < 1) { - //cam.Get3dOnPlane(&pose, f.p2d, f.p3d); - - //CvPoint3D32f p3d_vec; - //float l = PointVectorFromCamera(f.p3d, p3d_vec, &pose); - //if (l > plane_dist_limit) cam.Get3dOnDepth(&pose, f.p2d, plane_dist_limit, f.p3d); - cam.Get3dOnDepth(&pose, f.p2d, plane_dist_limit, f.p3d); - - // TODO: Now we don't create a shadow point for plane points. This is because - // we don't want to remember them as 3d points in container_triangulated. - // We probably get the same benefit just by assuming that everything is on plane? - //CreateShadowPoint(f.p3d_sh, f.p3d, &pose, shadow_point_dist); - f.p3d_sh = f.p3d; - - f.has_p3d = true; - f.estimation_type = 1; - } - } - } - - // #6. 
Triangulate features with good parallax - if (triangulate) { - for (iter = container.begin(); iter != iter_end; iter++) { - Feature &f = iter->second; - //if (f.type_id == 0) continue; - if (!f.has_p3d && !f.has_stored_pose) { - f.pose1 = pose; - f.p2d1 = f.p2d; - f.has_stored_pose = true; - cam.Get3dOnDepth(&pose, f.p2d, 8, f.p3d); - CreateShadowPoint(f.p3d_sh, f.p3d, &cam, &pose, parallax_length, triangulate_angle); - } - if (!f.has_p3d && f.has_stored_pose && f.estimation_type < 2) { - double dist_sh_sq = (f.projected_p2d_sh.x-f.projected_p2d.x)*(f.projected_p2d_sh.x-f.projected_p2d.x); - dist_sh_sq += (f.projected_p2d_sh.y-f.projected_p2d.y)*(f.projected_p2d_sh.y-f.projected_p2d.y); - if (dist_sh_sq > parallax_length_sq) { - CvPoint2D32f u1 = f.p2d1; - CvPoint2D32f u2 = f.p2d; - cam.Camera::Undistort(u1); - cam.Camera::Undistort(u2); - if(cam.ReconstructFeature(&f.pose1, &pose, &u1, &u2, &f.p3d, 10e-6)) { - CreateShadowPoint(f.p3d_sh, f.p3d, &cam, &pose, parallax_length, triangulate_angle); - f.has_p3d = true; - } else - f.has_stored_pose = false; - f.estimation_type = 2; - } - } - } - } - - // #7. Erase poor features - cam.Reproject(container, &pose, 1, false, false, 100); // TODO: Why again... - iter = container.begin(); - while(iter != container.end()) { - bool iter_inc = true; - if (iter->second.type_id > 0) { - Feature &f = iter->second; - if ((f.estimation_type == 0) || !f.has_p2d || !f.has_p3d); - else { - // Note that the projected_p2d needs to have valid content at this point - double dist_sq = (f.p2d.x-f.projected_p2d.x)*(f.p2d.x-f.projected_p2d.x); - dist_sq += (f.p2d.y-f.projected_p2d.y)*(f.p2d.y-f.projected_p2d.y); - - if (f.estimation_type == 1) { - if (dist_sq > (reproj_err_limit_sq)) f.has_p3d = false; - } else if (f.estimation_type == 2) { - if (dist_sq > (reproj_err_limit_sq)) { - container.erase(iter++); - iter_inc = false; - } - } else if (f.estimation_type == 3) { - if (container_triangulated.size() < min_triangulate) { - if (dist_sq > (reproj_err_limit_sq)) { - container.erase(iter++); - iter_inc = false; - } - } else { - if (dist_sq > (reproj_err_limit_sq)) f.has_p3d = false; - if (dist_sq > (reproj_err_limit_sq*2)) { - std::map::iterator iter_tmp; - iter_tmp = container_triangulated.find(iter->first); - if (iter_tmp != container_triangulated.end()) { - container_triangulated.erase(iter_tmp); - } - container.erase(iter++); - iter_inc = false; - } - } - } - } - } - if (iter_inc) iter++; - } - - // #8. 
Remember good features that have little reprojection error while the parallax is noticeable - if (remember_3d_points && (container_triangulated.size() < max_triangulate)) { - iter = container.begin(); - while(iter != container.end()) { - if ((iter->second.type_id > 0) && - (container_triangulated.find(iter->first) == container_triangulated.end())) - { - Feature &f = iter->second; - double dist_sq = (f.p2d.x-f.projected_p2d.x)*(f.p2d.x-f.projected_p2d.x); - dist_sq += (f.p2d.y-f.projected_p2d.y)*(f.p2d.y-f.projected_p2d.y); - double dist_sh_sq = (f.projected_p2d_sh.x-f.projected_p2d.x)*(f.projected_p2d_sh.x-f.projected_p2d.x); - dist_sh_sq += (f.projected_p2d_sh.y-f.projected_p2d.y)*(f.projected_p2d_sh.y-f.projected_p2d.y); - if ((dist_sq < reproj_err_limit_sq) && (dist_sh_sq > parallax_length_sq)) { - container_triangulated[iter->first] = iter->second; - f.estimation_type = 3; - } - } - iter++; - if (container_triangulated.size() >= max_triangulate) break; - } - } - - return true; +void CreateShadowPoint(CvPoint3D32f& p3d_sh, CvPoint3D32f p3d, CameraEC* cam, + Pose* camera_pose, float parallax_length, + float triangulate_angle) +{ + // Vector from camera-point to the 3D-point in marker coordinates + float l = PointVectorFromCamera(p3d, p3d_sh, camera_pose); + + // Figure out a suitable matching shadow point distance + CvPoint2D32f p2d; + CvPoint3D32f p3d2; + cam->ProjectPoint(p3d, camera_pose, p2d); + p2d.x += parallax_length; + cam->Get3dOnDepth(camera_pose, p2d, l, p3d2); + p3d2.x -= p3d.x; + p3d2.y -= p3d.y; + p3d2.z -= p3d.z; + float pl = std::sqrt(p3d2.x * p3d2.x + p3d2.y * p3d2.y + p3d2.z * p3d2.z); + float shadow_point_dist = + float(pl / std::abs(tan(triangulate_angle * 3.1415926535 / + 180.))); // How far we need the shadow point to + // have the parallax limit match + + // Create the shadow point + p3d_sh.x /= l; + p3d_sh.y /= l; + p3d_sh.z /= l; + p3d_sh.x *= shadow_point_dist; + p3d_sh.y *= shadow_point_dist; + p3d_sh.z *= shadow_point_dist; + p3d_sh.x += p3d.x; + p3d_sh.y += p3d.y; + p3d_sh.z += p3d.z; + + // std::cout<<"p : "<::iterator iter = container.begin(); - std::map::iterator iter_end = container.end(); - for(;iter != iter_end; iter++) { - if (pose_ok && (iter->second.type_id == 1) && (!iter->second.has_stored_pose) && (!iter->second.has_p3d)) { - iter->second.pose1 = pose; - iter->second.p2d1 = iter->second.p2d; - iter->second.has_stored_pose = true; - } - } - iter = container.begin(); - for(;iter != iter_end; iter++) - { - Feature &f = iter->second; - if(update_tri && f.has_stored_pose && !f.has_p3d) { - f.tri_cntr++; - } - if(f.tri_cntr==3) { - CvPoint2D32f u1 = f.p2d1; - CvPoint2D32f u2 = f.p2d; - cam.Camera::Undistort(u1); - cam.Camera::Undistort(u2); - if(cam.ReconstructFeature(&f.pose1, &pose, &u1, &u2, &f.p3d, 10e-6)) - f.has_p3d = true; - else - f.has_stored_pose = false; - f.tri_cntr = 0; - } - } - if (pose_ok) { - double err_markers = cam.Reproject(container, &pose, 0, true, false, 100); - if(err_markers > 30) { Reset(false); return false;} - double err_features = cam.Reproject(container, &pose, 1, false, false, 100); - cam.EraseUsingReprojectionError(container, 30, 1); - } - return pose_ok; +bool SimpleSfM::Update(IplImage* image, bool assume_plane, bool triangulate, + float reproj_err_limit, float triangulate_angle) +{ + const int min_triangulate = 50, max_triangulate = 150; + const float plane_dist_limit = 100.f; + const bool remember_3d_points = true; + const float parallax_length = 10.f; + const float parallax_length_sq = parallax_length * 
parallax_length;
+  float reproj_err_limit_sq = reproj_err_limit * reproj_err_limit;
+  std::map<int, Feature>::iterator iter;
+  std::map<int, Feature>::iterator iter_end = container.end();
+
+  // #1. Detect marker corners and track features
+  marker_detector.Detect(image, &cam, container, 0, 0, 1023, true, false);
+  tf.Purge();
+  tf.Track(image, 0, container, 1);  // Track without adding features
+  // tf.Track(image, 0, container, 1, 1024, 65535); // Track with adding
+  // features
+  tf.EraseNonTracked(container, 1);
+
+  // #2. Update pose
+  if (!pose_ok)
+    pose_ok = cam.CalcExteriorOrientation(container, &pose);
+  pose_ok = cam.UpdatePose(container, &pose);
+  if (!pose_ok)
+    pose_ok = cam.UpdatePose(container, &pose, 0);  // Only markers
+  if (!pose_ok)
+    return false;
+
+  // #3. Reproject features and their shadows
+  double err_markers = cam.Reproject(container, &pose, 0, true, false, 100);
+  if (err_markers > reproj_err_limit * 2)
+  {
+    Reset(false);
+    return false;
+  }
+  double err_features = cam.Reproject(container, &pose, 1, false, false, 100);
+  if ((err_markers == 0) && (err_features > reproj_err_limit * 2))
+  {
+    // Error is so large, that we just are satisfied we have some pose
+    // Don't triangulate and remove points based on this result
+    return true;
+  }
+
+  // #4. Add previously triangulated features to container if they are
+  // visible...
+  if (remember_3d_points)
+  {
+    IplImage* mask = tf.NewFeatureMask();
+    iter = container_triangulated.begin();
+    while (iter != container_triangulated.end())
+    {
+      if (container.find(iter->first) == container.end())
+      {
+        Camera* c = &cam;
+        c->ProjectPoint(iter->second.p3d, &pose, iter->second.projected_p2d);
+        if ((iter->second.projected_p2d.x >= 0) &&
+            (iter->second.projected_p2d.y >= 0) &&
+            (iter->second.projected_p2d.x < image->width) &&
+            (iter->second.projected_p2d.y < image->height))
+        {
+          CvScalar s = cvGet2D(mask, int(iter->second.projected_p2d.y),
+                               int(iter->second.projected_p2d.x));
+          if (s.val[0] == 0)
+          {
+            container[iter->first] = iter->second;
+            container[iter->first].estimation_type = 3;
+            container[iter->first].p2d = container[iter->first].projected_p2d;
+          }
+        }
+      }
+      iter++;
+    }
+  }
+
+  // #5. Add other features to container
+  // Assume them initially on marker plane if (assume_plane == true)
+  tf.AddFeatures(container, 1, 1024, 0xffffff);  // 65535);
+  if (assume_plane)
+  {
+    for (iter = container.begin(); iter != iter_end; iter++)
+    {
+      Feature& f = iter->second;
+      // if (f.type_id == 0) continue;
+      if (!f.has_p3d && f.estimation_type < 1)
+      {
+        // cam.Get3dOnPlane(&pose, f.p2d, f.p3d);
+
+        // CvPoint3D32f p3d_vec;
+        // float l = PointVectorFromCamera(f.p3d, p3d_vec, &pose);
+        // if (l > plane_dist_limit) cam.Get3dOnDepth(&pose, f.p2d,
+        // plane_dist_limit, f.p3d);
+        cam.Get3dOnDepth(&pose, f.p2d, plane_dist_limit, f.p3d);
+
+        // TODO: Now we don't create a shadow point for plane points. This is
+        // because
+        //       we don't want to remember them as 3d points in
+        //       container_triangulated. We probably get the same benefit just
+        //       by assuming that everything is on plane?
+        // CreateShadowPoint(f.p3d_sh, f.p3d, &pose, shadow_point_dist);
+        f.p3d_sh = f.p3d;
+
+        f.has_p3d = true;
+        f.estimation_type = 1;
+      }
+    }
+  }
+
+  // #6. 
Triangulate features with good parallax
+  if (triangulate)
+  {
+    for (iter = container.begin(); iter != iter_end; iter++)
+    {
+      Feature& f = iter->second;
+      // if (f.type_id == 0) continue;
+      if (!f.has_p3d && !f.has_stored_pose)
+      {
+        f.pose1 = pose;
+        f.p2d1 = f.p2d;
+        f.has_stored_pose = true;
+        cam.Get3dOnDepth(&pose, f.p2d, 8, f.p3d);
+        CreateShadowPoint(f.p3d_sh, f.p3d, &cam, &pose, parallax_length,
+                          triangulate_angle);
+      }
+      if (!f.has_p3d && f.has_stored_pose && f.estimation_type < 2)
+      {
+        double dist_sh_sq = (f.projected_p2d_sh.x - f.projected_p2d.x) *
+                            (f.projected_p2d_sh.x - f.projected_p2d.x);
+        dist_sh_sq += (f.projected_p2d_sh.y - f.projected_p2d.y) *
+                      (f.projected_p2d_sh.y - f.projected_p2d.y);
+        if (dist_sh_sq > parallax_length_sq)
+        {
+          CvPoint2D32f u1 = f.p2d1;
+          CvPoint2D32f u2 = f.p2d;
+          cam.Camera::Undistort(u1);
+          cam.Camera::Undistort(u2);
+          if (cam.ReconstructFeature(&f.pose1, &pose, &u1, &u2, &f.p3d, 10e-6))
+          {
+            CreateShadowPoint(f.p3d_sh, f.p3d, &cam, &pose, parallax_length,
+                              triangulate_angle);
+            f.has_p3d = true;
+          }
+          else
+            f.has_stored_pose = false;
+          f.estimation_type = 2;
+        }
+      }
+    }
+  }
+
+  // #7. Erase poor features
+  cam.Reproject(container, &pose, 1, false, false, 100);  // TODO: Why again...
+  iter = container.begin();
+  while (iter != container.end())
+  {
+    bool iter_inc = true;
+    if (iter->second.type_id > 0)
+    {
+      Feature& f = iter->second;
+      if ((f.estimation_type == 0) || !f.has_p2d || !f.has_p3d)
+        ;
+      else
+      {
+        // Note that the projected_p2d needs to have valid content at this point
+        double dist_sq =
+            (f.p2d.x - f.projected_p2d.x) * (f.p2d.x - f.projected_p2d.x);
+        dist_sq +=
+            (f.p2d.y - f.projected_p2d.y) * (f.p2d.y - f.projected_p2d.y);
+
+        if (f.estimation_type == 1)
+        {
+          if (dist_sq > (reproj_err_limit_sq))
+            f.has_p3d = false;
+        }
+        else if (f.estimation_type == 2)
+        {
+          if (dist_sq > (reproj_err_limit_sq))
+          {
+            container.erase(iter++);
+            iter_inc = false;
+          }
+        }
+        else if (f.estimation_type == 3)
+        {
+          if (container_triangulated.size() < min_triangulate)
+          {
+            if (dist_sq > (reproj_err_limit_sq))
+            {
+              container.erase(iter++);
+              iter_inc = false;
+            }
+          }
+          else
+          {
+            if (dist_sq > (reproj_err_limit_sq))
+              f.has_p3d = false;
+            if (dist_sq > (reproj_err_limit_sq * 2))
+            {
+              std::map<int, Feature>::iterator iter_tmp;
+              iter_tmp = container_triangulated.find(iter->first);
+              if (iter_tmp != container_triangulated.end())
+              {
+                container_triangulated.erase(iter_tmp);
+              }
+              container.erase(iter++);
+              iter_inc = false;
+            }
+          }
+        }
+      }
+    }
+    if (iter_inc)
+      iter++;
+  }
+
+  // #8. 
Remember good features that have little reprojection error while the + // parallax is noticeable + if (remember_3d_points && (container_triangulated.size() < max_triangulate)) + { + iter = container.begin(); + while (iter != container.end()) + { + if ((iter->second.type_id > 0) && + (container_triangulated.find(iter->first) == + container_triangulated.end())) + { + Feature& f = iter->second; + double dist_sq = + (f.p2d.x - f.projected_p2d.x) * (f.p2d.x - f.projected_p2d.x); + dist_sq += + (f.p2d.y - f.projected_p2d.y) * (f.p2d.y - f.projected_p2d.y); + double dist_sh_sq = (f.projected_p2d_sh.x - f.projected_p2d.x) * + (f.projected_p2d_sh.x - f.projected_p2d.x); + dist_sh_sq += (f.projected_p2d_sh.y - f.projected_p2d.y) * + (f.projected_p2d_sh.y - f.projected_p2d.y); + if ((dist_sq < reproj_err_limit_sq) && + (dist_sh_sq > parallax_length_sq)) + { + container_triangulated[iter->first] = iter->second; + f.estimation_type = 3; + } + } + iter++; + if (container_triangulated.size() >= max_triangulate) + break; + } + } + + return true; } -bool SimpleSfM::UpdateRotationsOnly(IplImage *image) { - int n_markers = marker_detector.Detect(image, &cam, container, 0, 0, 1023, true, false); - static int n_markers_prev = 0; - tf.Purge(); - tf.Track(image, 0, container, 1, 1024, 65535); - tf.EraseNonTracked(container, 1); - int type_id = -1; - if(n_markers>=1) type_id = 0; - if (n_markers==1) - { - pose_ok = cam.CalcExteriorOrientation(container, &pose, 0); - if(pose_ok) pose_original = pose; - } - else if(n_markers>1) - { - if(pose_ok) - pose_ok = cam.UpdatePose(container, &pose, 0); - else - pose_ok = cam.CalcExteriorOrientation(container, &pose, 0); - if(pose_ok) pose_original = pose; - } - else //if(pose_ok) // Tracking going on... - { - if (n_markers_prev > 0) { - pose_difference.Reset(); - pose_difference.Mirror(false, true, true); - std::map::iterator iter = container.begin(); - std::map::iterator iter_end = container.end(); - for(;iter != iter_end; iter++) { - if ((iter->second.type_id == 1) && (iter->second.has_p2d)) { - CvPoint2D32f _p2d = iter->second.p2d; - cam.Get3dOnDepth(&pose_difference, _p2d, 1, iter->second.p3d); - iter->second.has_p3d = true; - } - } - } - cam.UpdateRotation(container, &pose_difference, 1); - if(pose_ok) - { - double rot_mat[16]; - double gl_mat[16]; - pose_difference.GetMatrixGL(rot_mat); - pose_original.GetMatrixGL(gl_mat); - CvMat rot = cvMat(4, 4, CV_64F, rot_mat);// Rotation matrix (difference) from the tracker - CvMat mod = cvMat(4, 4, CV_64F, gl_mat); // Original modelview matrix (camera location) - cvMatMul(&mod, &rot, &rot); - /*if(pose_ok)*/ pose.SetMatrixGL(rot_mat); - } - //pose_ok = true; - } - n_markers_prev = n_markers; - if (pose_ok) { - if (n_markers < 1) { - std::map::iterator iter = container.begin(); - std::map::iterator iter_end = container.end(); - for(;iter != iter_end; iter++) { - if ((iter->second.type_id == 1) && (!iter->second.has_p3d) && (iter->second.has_p2d)) { - CvPoint2D32f _p2d = iter->second.p2d; - cam.Get3dOnDepth(&pose_difference, _p2d, 1, iter->second.p3d); - iter->second.has_p3d = true; - } - } - } - - double err_markers = cam.Reproject(container, &pose, 0, true, false, 100); - if(err_markers > 10) { Reset(false); return false;} - double err_features = cam.Reproject(container, &pose_difference, 1, false, false, 100); - cam.EraseUsingReprojectionError(container, 10, 1); - } - return pose_ok; +bool SimpleSfM::UpdateTriangulateOnly(IplImage* image) +{ + marker_detector.Detect(image, &cam, container, 0, 0, 1023, true, false); + 
tf.Purge();
+  tf.Track(image, 0, container, 1, 1024, 65535);
+  tf.EraseNonTracked(container, 1);
+  if (!pose_ok)
+    pose_ok = cam.CalcExteriorOrientation(container, &pose);
+  else
+    pose_ok = cam.UpdatePose(container, &pose);
+  if (pose_ok)
+    update_tri = moving.UpdateDistance(&pose, scale);
+  std::map<int, Feature>::iterator iter = container.begin();
+  std::map<int, Feature>::iterator iter_end = container.end();
+  for (; iter != iter_end; iter++)
+  {
+    if (pose_ok && (iter->second.type_id == 1) &&
+        (!iter->second.has_stored_pose) && (!iter->second.has_p3d))
+    {
+      iter->second.pose1 = pose;
+      iter->second.p2d1 = iter->second.p2d;
+      iter->second.has_stored_pose = true;
+    }
+  }
+  iter = container.begin();
+  for (; iter != iter_end; iter++)
+  {
+    Feature& f = iter->second;
+    if (update_tri && f.has_stored_pose && !f.has_p3d)
+    {
+      f.tri_cntr++;
+    }
+    if (f.tri_cntr == 3)
+    {
+      CvPoint2D32f u1 = f.p2d1;
+      CvPoint2D32f u2 = f.p2d;
+      cam.Camera::Undistort(u1);
+      cam.Camera::Undistort(u2);
+      if (cam.ReconstructFeature(&f.pose1, &pose, &u1, &u2, &f.p3d, 10e-6))
+        f.has_p3d = true;
+      else
+        f.has_stored_pose = false;
+      f.tri_cntr = 0;
+    }
+  }
+  if (pose_ok)
+  {
+    double err_markers = cam.Reproject(container, &pose, 0, true, false, 100);
+    if (err_markers > 30)
+    {
+      Reset(false);
+      return false;
+    }
+    double err_features = cam.Reproject(container, &pose, 1, false, false, 100);
+    cam.EraseUsingReprojectionError(container, 30, 1);
+  }
+  return pose_ok;
 }
 
-bool SimpleSfM::UpdateRotationsOnly(IplImage *image)
+bool SimpleSfM::UpdateRotationsOnly(IplImage* image)
+{
+  int n_markers =
+      marker_detector.Detect(image, &cam, container, 0, 0, 1023, true, false);
+  static int n_markers_prev = 0;
+  tf.Purge();
+  tf.Track(image, 0, container, 1, 1024, 65535);
+  tf.EraseNonTracked(container, 1);
+  int type_id = -1;
+  if (n_markers >= 1)
+    type_id = 0;
+  if (n_markers == 1)
+  {
+    pose_ok = cam.CalcExteriorOrientation(container, &pose, 0);
+    if (pose_ok)
+      
pose_original = pose;
+  }
+  else if (n_markers > 1)
+  {
+    if (pose_ok)
+      pose_ok = cam.UpdatePose(container, &pose, 0);
+    else
+      pose_ok = cam.CalcExteriorOrientation(container, &pose, 0);
+    if (pose_ok)
+      pose_original = pose;
+  }
+  else  // if(pose_ok) // Tracking going on...
+  {
+    if (n_markers_prev > 0)
+    {
+      pose_difference.Reset();
+      pose_difference.Mirror(false, true, true);
+      std::map<int, Feature>::iterator iter = container.begin();
+      std::map<int, Feature>::iterator iter_end = container.end();
+      for (; iter != iter_end; iter++)
+      {
+        if ((iter->second.type_id == 1) && (iter->second.has_p2d))
+        {
+          CvPoint2D32f _p2d = iter->second.p2d;
+          cam.Get3dOnDepth(&pose_difference, _p2d, 1, iter->second.p3d);
+          iter->second.has_p3d = true;
+        }
+      }
+    }
+    cam.UpdateRotation(container, &pose_difference, 1);
+    if (pose_ok)
+    {
+      double rot_mat[16];
+      double gl_mat[16];
+      pose_difference.GetMatrixGL(rot_mat);
+      pose_original.GetMatrixGL(gl_mat);
+      CvMat rot = cvMat(4, 4, CV_64F, rot_mat);  // Rotation matrix (difference)
+                                                 // from the tracker
+      CvMat mod = cvMat(4, 4, CV_64F,
+                        gl_mat);  // Original modelview matrix (camera location)
+      cvMatMul(&mod, &rot, &rot);
+      /*if(pose_ok)*/ pose.SetMatrixGL(rot_mat);
+    }
+    // pose_ok = true;
+  }
+  n_markers_prev = n_markers;
+  if (pose_ok)
+  {
+    if (n_markers < 1)
+    {
+      std::map<int, Feature>::iterator iter = container.begin();
+      std::map<int, Feature>::iterator iter_end = container.end();
+      for (; iter != iter_end; iter++)
+      {
+        if ((iter->second.type_id == 1) && (!iter->second.has_p3d) &&
+            (iter->second.has_p2d))
+        {
+          CvPoint2D32f _p2d = iter->second.p2d;
+          cam.Get3dOnDepth(&pose_difference, _p2d, 1, iter->second.p3d);
+          iter->second.has_p3d = true;
+        }
+      }
+    }
+
+    double err_markers = cam.Reproject(container, &pose, 0, true, false, 100);
+    if (err_markers > 10)
+    {
+      Reset(false);
+      return false;
+    }
+    double err_features =
+        cam.Reproject(container, &pose_difference, 1, false, false, 100);
+    cam.EraseUsingReprojectionError(container, 10, 1);
+  }
+  return pose_ok;
 }
 
-} // namespace alvar
+void SimpleSfM::Draw(IplImage* rgba)
+{
+  if (markers_found)
+    return;
+  std::map<int, Feature>::iterator iter = container.begin();
+  std::map<int, Feature>::iterator iter_end = container.end();
+  for (; iter != iter_end; iter++)
+  {
+    Feature& f = iter->second;
+    if (f.has_p2d)
+    {
+      int r = 0, g = 0, b = 0, rad = 1;
+      if (f.type_id == 0)
+      {
+        r = 255;
+        g = 0;
+        b = 0;
+      }
+      else if (f.estimation_type == 0)
+      {
+        if (f.has_stored_pose)
+        {
+          r = 255;
+          g = 0;
+          b = 255;
+        }
+        else
+        {
+          r = 0;
+          g = 255;
+          b = 255;
+        }
+      }
+      else if (f.estimation_type == 1)
+      {
+        r = 0;
+        g = 255;
+        b = 0;
+      }
+      else if (f.estimation_type == 2)
+      {
+        r = 255;
+        g = 0;
+        b = 255;
+      }
+      else if (f.estimation_type == 3)
+      {
+        r = 0;
+        g = 0;
+        b = 255;
+      }
+      if (f.has_p3d)
+        rad = 5;
+      else
+        rad = f.tri_cntr + 1;
+
+      cvCircle(rgba, cvPointFrom32f(f.p2d), rad, CV_RGB(r, g, b));
+      if (pose_ok)
+      {
+        // The shadow point line
+        if (f.type_id > 0 && f.estimation_type < 3 && f.p3d_sh.x != 0.f)
+        {
+          cvLine(rgba, cvPointFrom32f(f.projected_p2d),
+                 cvPointFrom32f(f.projected_p2d_sh), CV_RGB(0, 255, 0));
+        }
+        // Reprojection error
+        if (f.has_p3d)
+        {
+          cvLine(rgba, cvPointFrom32f(f.p2d), cvPointFrom32f(f.projected_p2d),
+                 CV_RGB(255, 0, 255));
+        }
+      }
+      // if (pose_ok && f.has_p3d) {
+      //  cvLine(rgba, cvPointFrom32f(f.p2d), cvPointFrom32f(f.projected_p2d),
+      //  CV_RGB(255,0,255));
+      //}
+      // if (f.type_id == 0) {
+      //  int id = iter->first;
+      //  cvCircle(rgba, cvPointFrom32f(f.p2d), 7, CV_RGB(255,0,0));
+      //} else {
+      //  int id = iter->first-1024+1;
+      //  if (f.has_p3d) {
+      
//    cvCircle(rgba, cvPointFrom32f(f.p2d), 5, CV_RGB(0,255,0));
+      //  }
+      //  else if (f.has_stored_pose)
+      //    cvCircle(rgba, cvPointFrom32f(f.p2d), f.tri_cntr+1,
+      //    CV_RGB(255,0,255)); else cvCircle(rgba, cvPointFrom32f(f.p2d), 5,
+      //    CV_RGB(0,255,255));
+      //}
+    }
+  }
+}
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Threads.cpp b/ar_track_alvar/src/Threads.cpp
index fc4ef7b..114bbc3 100644
--- a/ar_track_alvar/src/Threads.cpp
+++ b/ar_track_alvar/src/Threads.cpp
@@ -25,21 +25,20 @@
 
 #include "ar_track_alvar/Threads_private.h"
 
-namespace alvar {
-
-Threads::Threads()
-    : d(new ThreadsPrivate())
+namespace alvar
+{
+Threads::Threads() : d(new ThreadsPrivate())
 {
 }
 
 Threads::~Threads()
 {
-    delete d;
+  delete d;
 }
 
-bool Threads::create(void *(*method)(void *), void *parameters)
+bool Threads::create(void* (*method)(void*), void* parameters)
 {
-    return d->create(method, parameters);
+  return d->create(method, parameters);
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Threads_unix.cpp b/ar_track_alvar/src/Threads_unix.cpp
index 2e91bd7..8500a27 100644
--- a/ar_track_alvar/src/Threads_unix.cpp
+++ b/ar_track_alvar/src/Threads_unix.cpp
@@ -26,42 +26,42 @@
 #include <pthread.h>
 #include <vector>
 
-namespace alvar {
-
+namespace alvar
+{
 class ThreadsPrivateData
 {
 public:
-    ThreadsPrivateData()
-        : mHandles()
-    {
-    }
+  ThreadsPrivateData() : mHandles()
+  {
+  }
 
-    std::vector<pthread_t> mHandles;
+  std::vector<pthread_t> mHandles;
 };
 
-ThreadsPrivate::ThreadsPrivate()
-    : d(new ThreadsPrivateData())
+ThreadsPrivate::ThreadsPrivate() : d(new ThreadsPrivateData())
 {
 }
 
 ThreadsPrivate::~ThreadsPrivate()
 {
-    for (int i = 0; i < (int)d->mHandles.size(); ++i) {
-        pthread_exit(&d->mHandles.at(i));
-    }
-    d->mHandles.clear();
+  for (int i = 0; i < (int)d->mHandles.size(); ++i)
+  {
+    pthread_exit(&d->mHandles.at(i));
+  }
+  d->mHandles.clear();
 
-    delete d;
+  delete d;
}
 
-bool ThreadsPrivate::create(void *(*method)(void *), void *parameters)
+bool ThreadsPrivate::create(void* (*method)(void*), void* parameters)
 {
-    pthread_t thread;
-    if (pthread_create(&thread, 0, method, parameters)) {
-        d->mHandles.push_back(thread);
-        return true;
-    }
-    return false;
+  pthread_t thread;
+  // pthread_create returns 0 on success, so the success path must test for 0
+  if (!pthread_create(&thread, 0, method, parameters))
+  {
+    d->mHandles.push_back(thread);
+    return true;
+  }
+  return false;
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Threads_win.cpp b/ar_track_alvar/src/Threads_win.cpp
index 3ed7355..8ca8200 100644
--- a/ar_track_alvar/src/Threads_win.cpp
+++ b/ar_track_alvar/src/Threads_win.cpp
@@ -26,68 +26,63 @@
 #include <windows.h>
 #include <vector>
 
-namespace alvar {
-
+namespace alvar
+{
 struct StartThreadParameters
 {
-    void *(*method)(void *);
-    void *parameters;
+  void* (*method)(void*);
+  void* parameters;
 };
 
-static DWORD WINAPI startThread(void *parameters)
+static DWORD WINAPI startThread(void* parameters)
 {
-    DWORD value;
-    struct StartThreadParameters *p = (struct StartThreadParameters *)parameters;
-    value = (DWORD)p->method(p->parameters);
-    delete p;
-    return value;
+  DWORD value;
+  struct StartThreadParameters* p = (struct StartThreadParameters*)parameters;
+  value = (DWORD)p->method(p->parameters);
+  delete p;
+  return value;
 }
 
 class ThreadsPrivateData
 {
 public:
-    ThreadsPrivateData()
-        : mHandles()
-    {
-    }
+  ThreadsPrivateData() : mHandles()
+  {
+  }
 
-    std::vector<HANDLE> mHandles;
+  std::vector<HANDLE> mHandles;
 };
 
-ThreadsPrivate::ThreadsPrivate()
-    : d(new ThreadsPrivateData())
+ThreadsPrivate::ThreadsPrivate() : d(new ThreadsPrivateData())
 {
 }
 
 ThreadsPrivate::~ThreadsPrivate()
 {
-    for (int i = 
0; i < (int)d->mHandles.size(); ++i) {
-        CloseHandle(d->mHandles.at(i));
-    }
-    d->mHandles.clear();
+  for (int i = 0; i < (int)d->mHandles.size(); ++i)
+  {
+    CloseHandle(d->mHandles.at(i));
+  }
+  d->mHandles.clear();
 
-    delete d;
+  delete d;
 }
 
-bool ThreadsPrivate::create(void *(*method)(void *), void *parameters)
+bool ThreadsPrivate::create(void* (*method)(void*), void* parameters)
 {
-    StartThreadParameters *p = new StartThreadParameters;
-    p->method = method;
-    p->parameters = parameters;
+  StartThreadParameters* p = new StartThreadParameters;
+  p->method = method;
+  p->parameters = parameters;
 
-    HANDLE thread = CreateThread(
-        NULL,
-        0,
-        (LPTHREAD_START_ROUTINE)startThread,
-        p,
-        0,
-        NULL);
+  HANDLE thread =
+      CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)startThread, p, 0, NULL);
 
-    if (thread != NULL) {
-        d->mHandles.push_back(thread);
-        return true;
-    }
-    return false;
+  if (thread != NULL)
+  {
+    d->mHandles.push_back(thread);
+    return true;
+  }
+  return false;
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Timer.cpp b/ar_track_alvar/src/Timer.cpp
index ee40e2d..379a199 100644
--- a/ar_track_alvar/src/Timer.cpp
+++ b/ar_track_alvar/src/Timer.cpp
@@ -25,26 +25,25 @@
 
 #include "Timer_private.h"
 
-namespace alvar {
-
-Timer::Timer()
-    : d(new TimerPrivate())
+namespace alvar
+{
+Timer::Timer() : d(new TimerPrivate())
 {
 }
 
 Timer::~Timer()
 {
-    delete d;
+  delete d;
 }
 
 void Timer::start()
 {
-    return d->start();
+  return d->start();
 }
 
 double Timer::stop()
 {
-    return d->stop();
+  return d->stop();
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Timer_unix.cpp b/ar_track_alvar/src/Timer_unix.cpp
index a34071b..5dd005e 100644
--- a/ar_track_alvar/src/Timer_unix.cpp
+++ b/ar_track_alvar/src/Timer_unix.cpp
@@ -25,40 +25,38 @@
 
 #include <time.h>
 
-namespace alvar {
-
+namespace alvar
+{
 class TimerPrivateData
 {
public:
-    TimerPrivateData()
-        : mStart()
-    {
-    }
+  TimerPrivateData() : mStart()
+  {
+  }
 
-    timespec mStart;
+  timespec mStart;
 };
 
-TimerPrivate::TimerPrivate()
-    : d(new TimerPrivateData())
+TimerPrivate::TimerPrivate() : d(new TimerPrivateData())
 {
 }
 
 TimerPrivate::~TimerPrivate()
 {
-    delete d;
+  delete d;
 }
 
 void TimerPrivate::start()
 {
-    clock_gettime(CLOCK_MONOTONIC, &d->mStart);
+  clock_gettime(CLOCK_MONOTONIC, &d->mStart);
 }
 
 double TimerPrivate::stop()
 {
-    timespec stop;
-    clock_gettime(CLOCK_MONOTONIC, &stop);
-    return (stop.tv_sec - d->mStart.tv_sec) +
-           (stop.tv_nsec - d->mStart.tv_nsec) / 1000000000.0;
+  timespec stop;
+  clock_gettime(CLOCK_MONOTONIC, &stop);
+  return (stop.tv_sec - d->mStart.tv_sec) +
+         (stop.tv_nsec - d->mStart.tv_nsec) / 1000000000.0;
 }
 
-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/Timer_win.cpp b/ar_track_alvar/src/Timer_win.cpp
index 3d5cb0a..eaefe90 100644
--- a/ar_track_alvar/src/Timer_win.cpp
+++ b/ar_track_alvar/src/Timer_win.cpp
@@ -25,61 +25,65 @@
 
 #include <windows.h>
 
-namespace alvar {
-
+namespace alvar
+{
 class TimerPrivateData
 {
 public:
-    TimerPrivateData()
-        : mPerformanceQuerySupported(false)
-        , mPerformanceFrequency()
-        , mPerformanceStart()
-        , mStart()
-    {
-    }
+  TimerPrivateData()
+    : mPerformanceQuerySupported(false)
+    , mPerformanceFrequency()
+    , mPerformanceStart()
+    , mStart()
+  {
+  }
 
-    bool mPerformanceQuerySupported;
-    LARGE_INTEGER mPerformanceFrequency;
-    LARGE_INTEGER mPerformanceStart;
-    DWORD mStart;
+  bool mPerformanceQuerySupported;
+  LARGE_INTEGER mPerformanceFrequency;
+  LARGE_INTEGER mPerformanceStart;
+  DWORD mStart;
 };
 
-TimerPrivate::TimerPrivate()
-    : d(new 
TimerPrivateData()) +TimerPrivate::TimerPrivate() : d(new TimerPrivateData()) { - QueryPerformanceFrequency(&d->mPerformanceFrequency); - if (d->mPerformanceFrequency.QuadPart) { - d->mPerformanceQuerySupported = true; - } + QueryPerformanceFrequency(&d->mPerformanceFrequency); + if (d->mPerformanceFrequency.QuadPart) + { + d->mPerformanceQuerySupported = true; + } } TimerPrivate::~TimerPrivate() { - delete d; + delete d; } void TimerPrivate::start() { - if (d->mPerformanceQuerySupported) { - QueryPerformanceCounter(&d->mPerformanceStart); - } - else { - d->mStart = GetTickCount(); - } + if (d->mPerformanceQuerySupported) + { + QueryPerformanceCounter(&d->mPerformanceStart); + } + else + { + d->mStart = GetTickCount(); + } } double TimerPrivate::stop() { - if (d->mPerformanceQuerySupported) { - LARGE_INTEGER stop; - LARGE_INTEGER difference; - QueryPerformanceCounter(&stop); - difference.QuadPart = stop.QuadPart - d->mPerformanceStart.QuadPart; - return double(difference.QuadPart) / d->mPerformanceFrequency.QuadPart; - } - else { - return (GetTickCount() - d->mStart) / 1000.0; - } + if (d->mPerformanceQuerySupported) + { + LARGE_INTEGER stop; + LARGE_INTEGER difference; + QueryPerformanceCounter(&stop); + difference.QuadPart = stop.QuadPart - d->mPerformanceStart.QuadPart; + return double(difference.QuadPart) / d->mPerformanceFrequency.QuadPart; + } + else + { + return (GetTickCount() - d->mStart) / 1000.0; + } } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/TrackerFeatures.cpp b/ar_track_alvar/src/TrackerFeatures.cpp index 9ddcf1a..a0f9dc4 100644 --- a/ar_track_alvar/src/TrackerFeatures.cpp +++ b/ar_track_alvar/src/TrackerFeatures.cpp @@ -26,236 +26,343 @@ using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; -TrackerFeatures::TrackerFeatures(int _max_features, int _min_features, double _quality_level, double _min_distance, int _pyr_levels, int _win_size) : - x_res(0), y_res(0), frame_count(0), quality_level(0), min_distance(0), min_features(0), max_features(0), status(0), - img_eig(0), img_tmp(0), gray(0), prev_gray(0), pyramid(0), prev_pyramid(0), mask(0), next_id(0), win_size(0), pyr_levels(0), - prev_features(0), features(0), prev_feature_count(0), feature_count(0), prev_ids(0), ids(0) +TrackerFeatures::TrackerFeatures(int _max_features, int _min_features, + double _quality_level, double _min_distance, + int _pyr_levels, int _win_size) + : x_res(0) + , y_res(0) + , frame_count(0) + , quality_level(0) + , min_distance(0) + , min_features(0) + , max_features(0) + , status(0) + , img_eig(0) + , img_tmp(0) + , gray(0) + , prev_gray(0) + , pyramid(0) + , prev_pyramid(0) + , mask(0) + , next_id(0) + , win_size(0) + , pyr_levels(0) + , prev_features(0) + , features(0) + , prev_feature_count(0) + , feature_count(0) + , prev_ids(0) + , ids(0) { - next_id=1; // When this should be reset? - pyr_levels = _pyr_levels; - win_size = _win_size; - ChangeSettings(_max_features, _min_features, _quality_level, _min_distance); + next_id = 1; // When this should be reset? 
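+  // NOTE: Reset() below clears feature_count and frame_count but leaves
+  // next_id untouched; feature ids therefore stay unique across resets
+  // until AddFeatures() wraps the counter at 0x7fff.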
+ pyr_levels = _pyr_levels; + win_size = _win_size; + ChangeSettings(_max_features, _min_features, _quality_level, _min_distance); } -TrackerFeatures::~TrackerFeatures() { - if (status) delete [] status; - if (prev_features) delete [] prev_features; - if (features) delete [] features; - if (prev_ids) delete [] prev_ids; - if (ids) delete [] ids; - if (img_eig) cvReleaseImage(&img_eig); - if (img_tmp) cvReleaseImage(&img_tmp); - if (gray) cvReleaseImage(&gray); - if (prev_gray) cvReleaseImage(&prev_gray); - if (pyramid) cvReleaseImage(&pyramid); - if (prev_pyramid) cvReleaseImage(&prev_pyramid); - if (mask) cvReleaseImage(&mask); +TrackerFeatures::~TrackerFeatures() +{ + if (status) + delete[] status; + if (prev_features) + delete[] prev_features; + if (features) + delete[] features; + if (prev_ids) + delete[] prev_ids; + if (ids) + delete[] ids; + if (img_eig) + cvReleaseImage(&img_eig); + if (img_tmp) + cvReleaseImage(&img_tmp); + if (gray) + cvReleaseImage(&gray); + if (prev_gray) + cvReleaseImage(&prev_gray); + if (pyramid) + cvReleaseImage(&pyramid); + if (prev_pyramid) + cvReleaseImage(&prev_pyramid); + if (mask) + cvReleaseImage(&mask); } -void TrackerFeatures::ChangeSettings(int _max_features, int _min_features, double _quality_level, double _min_distance) { - - if(_max_features==max_features && _min_features==min_features && _quality_level==quality_level && _min_distance==min_distance) return; +void TrackerFeatures::ChangeSettings(int _max_features, int _min_features, + double _quality_level, + double _min_distance) +{ + if (_max_features == max_features && _min_features == min_features && + _quality_level == quality_level && _min_distance == min_distance) + return; - int common_features = min(feature_count, _max_features); - max_features = _max_features; - min_features = _min_features; - quality_level = _quality_level; - min_distance = _min_distance; - if (status) delete [] status; status = NULL; - if (prev_ids) delete [] prev_ids; prev_ids = NULL; - if (prev_features) delete [] prev_features; prev_features = NULL; - if (ids) { - int *ids_new = new int[max_features]; - assert(common_features < max_features); - memcpy(ids_new, ids, sizeof(int)*common_features); - delete [] ids; - ids = ids_new; - } else { - ids = new int[max_features]; - } - if (features) { - CvPoint2D32f *features_new = new CvPoint2D32f[max_features]; - memcpy(features_new, features, sizeof(CvPoint2D32f)*common_features); - delete [] features; - features = features_new; - } else { - features = new CvPoint2D32f[max_features]; - } - status = new char[max_features]; - prev_ids = new int[max_features]; - prev_features = new CvPoint2D32f[max_features]; - prev_feature_count=0; - feature_count=common_features; + int common_features = min(feature_count, _max_features); + max_features = _max_features; + min_features = _min_features; + quality_level = _quality_level; + min_distance = _min_distance; + if (status) + delete[] status; + status = NULL; + if (prev_ids) + delete[] prev_ids; + prev_ids = NULL; + if (prev_features) + delete[] prev_features; + prev_features = NULL; + if (ids) + { + int* ids_new = new int[max_features]; + assert(common_features < max_features); + memcpy(ids_new, ids, sizeof(int) * common_features); + delete[] ids; + ids = ids_new; + } + else + { + ids = new int[max_features]; + } + if (features) + { + CvPoint2D32f* features_new = new CvPoint2D32f[max_features]; + memcpy(features_new, features, sizeof(CvPoint2D32f) * common_features); + delete[] features; + features = features_new; + } + else + { + 
features = new CvPoint2D32f[max_features]; + } + status = new char[max_features]; + prev_ids = new int[max_features]; + prev_features = new CvPoint2D32f[max_features]; + prev_feature_count = 0; + feature_count = common_features; - assert(ids); - assert(features); - assert(status); - assert(prev_ids); - assert(prev_features); + assert(ids); + assert(features); + assert(status); + assert(prev_ids); + assert(prev_features); } -void TrackerFeatures::Reset() { - feature_count=0; - frame_count=0; +void TrackerFeatures::Reset() +{ + feature_count = 0; + frame_count = 0; } -bool TrackerFeatures::DelFeature(int index) { - if (index > feature_count) return false; - feature_count--; - for (int i=index; i feature_count) + return false; + feature_count--; + for (int i = index; i < feature_count; i++) + { + features[i] = features[i + 1]; + ids[i] = ids[i + 1]; + } + return true; } -bool TrackerFeatures::DelFeatureId(int id) { - for (int i=0; iwidth) || (y_res != img->height)) { - if (img_eig) cvReleaseImage(&img_eig); - if (img_tmp) cvReleaseImage(&img_tmp); - if (gray) cvReleaseImage(&gray); - if (prev_gray) cvReleaseImage(&prev_gray); - if (pyramid) cvReleaseImage(&pyramid); - if (prev_pyramid) cvReleaseImage(&prev_pyramid); - if (mask) cvReleaseImage(&mask); - x_res = img->width; y_res = img->height; - img_eig = cvCreateImage(cvGetSize(img), IPL_DEPTH_32F, 1); - img_tmp = cvCreateImage(cvGetSize(img), IPL_DEPTH_32F, 1); - gray = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1); - prev_gray = cvCreateImage(cvGetSize(img),IPL_DEPTH_8U, 1); - pyramid = cvCreateImage(cvSize(img->width+8,img->height/3), IPL_DEPTH_8U, 1); - prev_pyramid = cvCreateImage(cvSize(img->width+8,img->height/3), IPL_DEPTH_8U, 1); - mask = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1); - frame_count=0; - if (min_distance == 0) { - min_distance = std::sqrt(double(img->width*img->height/max_features)); - min_distance *= 0.8; //(double(min_features)/max_features); - } - } - // Swap - IplImage* tmp; - CvPoint2D32f* tmp2; - CV_SWAP(prev_gray, gray, tmp); - CV_SWAP(prev_features, features, tmp2); - prev_feature_count=feature_count; - memcpy(prev_ids, ids, sizeof(int)*max_features); - if (img->nChannels == 1) { - cvCopy(img, gray); - } else { - cvCvtColor(img, gray, CV_RGB2GRAY); - } - // TODO: We used to add features here - //if (prev_feature_count < 1) return -1; - frame_count++; - if (frame_count <= 1) { - memcpy(features, prev_features, sizeof(CvPoint2D32f)*prev_feature_count); - memcpy(ids, prev_ids, sizeof(int)*prev_feature_count); - feature_count = prev_feature_count; - } else if (prev_feature_count > 0) { - // Track - cvCalcOpticalFlowPyrLK(prev_gray, gray, prev_pyramid, pyramid, - prev_features, features, prev_feature_count, cvSize(win_size, win_size), pyr_levels, status, 0, - cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03), 0); - feature_count=0; - for (int i=0; iwidth) || (y_res != img->height)) + { + if (img_eig) + cvReleaseImage(&img_eig); + if (img_tmp) + cvReleaseImage(&img_tmp); + if (gray) + cvReleaseImage(&gray); + if (prev_gray) + cvReleaseImage(&prev_gray); + if (pyramid) + cvReleaseImage(&pyramid); + if (prev_pyramid) + cvReleaseImage(&prev_pyramid); + if (mask) + cvReleaseImage(&mask); + x_res = img->width; + y_res = img->height; + img_eig = cvCreateImage(cvGetSize(img), IPL_DEPTH_32F, 1); + img_tmp = cvCreateImage(cvGetSize(img), IPL_DEPTH_32F, 1); + gray = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1); + prev_gray = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1); + pyramid = + 
cvCreateImage(cvSize(img->width + 8, img->height / 3), IPL_DEPTH_8U, 1); + prev_pyramid = + cvCreateImage(cvSize(img->width + 8, img->height / 3), IPL_DEPTH_8U, 1); + mask = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1); + frame_count = 0; + if (min_distance == 0) + { + min_distance = std::sqrt(double(img->width * img->height / max_features)); + min_distance *= 0.8; //(double(min_features)/max_features); + } + } + // Swap + IplImage* tmp; + CvPoint2D32f* tmp2; + CV_SWAP(prev_gray, gray, tmp); + CV_SWAP(prev_features, features, tmp2); + prev_feature_count = feature_count; + memcpy(prev_ids, ids, sizeof(int) * max_features); + if (img->nChannels == 1) + { + cvCopy(img, gray); + } + else + { + cvCvtColor(img, gray, CV_RGB2GRAY); + } + // TODO: We used to add features here + // if (prev_feature_count < 1) return -1; + frame_count++; + if (frame_count <= 1) + { + memcpy(features, prev_features, sizeof(CvPoint2D32f) * prev_feature_count); + memcpy(ids, prev_ids, sizeof(int) * prev_feature_count); + feature_count = prev_feature_count; + } + else if (prev_feature_count > 0) + { + // Track + cvCalcOpticalFlowPyrLK( + prev_gray, gray, prev_pyramid, pyramid, prev_features, features, + prev_feature_count, cvSize(win_size, win_size), pyr_levels, status, 0, + cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03), 0); + feature_count = 0; + for (int i = 0; i < prev_feature_count; i++) + { + if (!status[i]) + continue; + features[feature_count] = features[i]; + ids[feature_count] = prev_ids[i]; + feature_count++; + } + } - if (add_features) AddFeatures(new_features_mask); + if (add_features) + AddFeatures(new_features_mask); - return 1; + return 1; } -double TrackerFeatures::Reset(IplImage *img, IplImage *new_features_mask) { - feature_count=0; - frame_count=0; - return TrackHid(img, new_features_mask); +double TrackerFeatures::Reset(IplImage* img, IplImage* new_features_mask) +{ + feature_count = 0; + frame_count = 0; + return TrackHid(img, new_features_mask); } -double TrackerFeatures::Track(IplImage *img, bool add_features) { - return TrackHid(img, NULL); //, add_features); +double TrackerFeatures::Track(IplImage* img, bool add_features) +{ + return TrackHid(img, NULL); //, add_features); } double TrackerFeatures::Track(IplImage* img, IplImage* mask) { - return TrackHid(img, mask); //, true); + return TrackHid(img, mask); //, true); } -IplImage *TrackerFeatures::NewFeatureMask() { - cvSet(mask, cvScalar(255)); - for (int i=0; i= 1) { - for (int i=feature_count; i= 1) + { + for (int i = feature_count; i < feature_count + new_feature_count; i++) + { + ids[i] = next_id; + next_id = ((next_id + 1) % (0x7fff)); + } + feature_count += new_feature_count; + } + } + return feature_count; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/TrackerOrientation.cpp b/ar_track_alvar/src/TrackerOrientation.cpp index 1ed352f..a058d1a 100644 --- a/ar_track_alvar/src/TrackerOrientation.cpp +++ b/ar_track_alvar/src/TrackerOrientation.cpp @@ -26,221 +26,243 @@ using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; -void TrackerOrientation::Project(CvMat* state, CvMat* projection, void *param) +void TrackerOrientation::Project(CvMat* state, CvMat* projection, void* param) { - TrackerOrientation *tracker = (TrackerOrientation*)param; - int count = projection->rows; - CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0+0])); - double zeros[3] = {0}; - CvMat zero_tra = cvMat(3, 1, CV_64F, zeros); - cvReshape(projection, projection, 2, 1); - 
cvProjectPoints2(tracker->_object_model, &rot_mat, &zero_tra, &(tracker->_camera->calib_K), &(tracker->_camera->calib_D), projection); - cvReshape(projection, projection, 1, count); + TrackerOrientation* tracker = (TrackerOrientation*)param; + int count = projection->rows; + CvMat rot_mat = cvMat(3, 1, CV_64F, &(state->data.db[0 + 0])); + double zeros[3] = { 0 }; + CvMat zero_tra = cvMat(3, 1, CV_64F, zeros); + cvReshape(projection, projection, 2, 1); + cvProjectPoints2(tracker->_object_model, &rot_mat, &zero_tra, + &(tracker->_camera->calib_K), &(tracker->_camera->calib_D), + projection); + cvReshape(projection, projection, 1, count); } void TrackerOrientation::Reset() -{ - _pose.Reset(); - _pose.Mirror(false, true, true); +{ + _pose.Reset(); + _pose.Mirror(false, true, true); } // Pose difference is stored... -double TrackerOrientation::Track(IplImage *image) +double TrackerOrientation::Track(IplImage* image) { - UpdateRotationOnly(image, 0); - return 0; + UpdateRotationOnly(image, 0); + return 0; } -bool TrackerOrientation::UpdatePose(IplImage *image) +bool TrackerOrientation::UpdatePose(IplImage* image) { - int count_points = _F_v.size(); - if(count_points < 6) return false; - - CvMat* _M = cvCreateMat(count_points, 1, CV_64FC3); - CvMat* image_observations = cvCreateMat(count_points*2, 1, CV_64F); // [u v u v u v ...]' - - //map::iterator it; - int ind = 0; - for(map::iterator it=_F_v.begin(); it!=_F_v.end(); it++) - { - if((it->second.status3D == Feature::USE_FOR_POSE || - it->second.status3D == Feature::IS_INITIAL) && - it->second.status2D == Feature::IS_TRACKED) - { - _M->data.db[ind*3+0] = it->second.point3d.x; - _M->data.db[ind*3+1] = it->second.point3d.y; - _M->data.db[ind*3+2] = it->second.point3d.z; - - image_observations->data.db[ind*2+0] = it->second.point.x; - image_observations->data.db[ind*2+1] = it->second.point.y; - ind++; - } - } - - if(ind < 6) - { - cvReleaseMat(&image_observations); - cvReleaseMat(&_M); - return false; - } - - double rot[3]; CvMat rotm = cvMat(3, 1, CV_64F, rot); - _pose.GetRodriques(&rotm); - - CvMat* par = cvCreateMat(3, 1, CV_64F); - memcpy(&(par->data.db[0+0]), rot, 3*sizeof(double)); - //par->data.db[3] = 0; - - CvRect r; r.x = 0; r.y = 0; r.height = ind; r.width = 1; - CvMat Msub; - cvGetSubRect(_M, &Msub, r); - _object_model = &Msub; - - r.height = 2*ind; - CvMat image_observations_sub; - cvGetSubRect(image_observations, &image_observations_sub, r); - - alvar::Optimization *opt = new alvar::Optimization(3, 2*ind); - - double foo = opt->Optimize(par, &image_observations_sub, 0.0005, 5, Project, this, alvar::Optimization::TUKEY_LM); - memcpy(rot, &(par->data.db[0+0]), 3*sizeof(double)); - _pose.SetRodriques(&rotm); - - delete opt; - - cvReleaseMat(&par); - cvReleaseMat(&image_observations); - cvReleaseMat(&_M); - - return true; + int count_points = _F_v.size(); + if (count_points < 6) + return false; + + CvMat* _M = cvCreateMat(count_points, 1, CV_64FC3); + CvMat* image_observations = + cvCreateMat(count_points * 2, 1, CV_64F); // [u v u v u v ...]' + + // map::iterator it; + int ind = 0; + for (map::iterator it = _F_v.begin(); it != _F_v.end(); it++) + { + if ((it->second.status3D == Feature::USE_FOR_POSE || + it->second.status3D == Feature::IS_INITIAL) && + it->second.status2D == Feature::IS_TRACKED) + { + _M->data.db[ind * 3 + 0] = it->second.point3d.x; + _M->data.db[ind * 3 + 1] = it->second.point3d.y; + _M->data.db[ind * 3 + 2] = it->second.point3d.z; + + image_observations->data.db[ind * 2 + 0] = it->second.point.x; + 
image_observations->data.db[ind * 2 + 1] = it->second.point.y; + ind++; + } + } + + if (ind < 6) + { + cvReleaseMat(&image_observations); + cvReleaseMat(&_M); + return false; + } + + double rot[3]; + CvMat rotm = cvMat(3, 1, CV_64F, rot); + _pose.GetRodriques(&rotm); + + CvMat* par = cvCreateMat(3, 1, CV_64F); + memcpy(&(par->data.db[0 + 0]), rot, 3 * sizeof(double)); + // par->data.db[3] = 0; + + CvRect r; + r.x = 0; + r.y = 0; + r.height = ind; + r.width = 1; + CvMat Msub; + cvGetSubRect(_M, &Msub, r); + _object_model = &Msub; + + r.height = 2 * ind; + CvMat image_observations_sub; + cvGetSubRect(image_observations, &image_observations_sub, r); + + alvar::Optimization* opt = new alvar::Optimization(3, 2 * ind); + + double foo = opt->Optimize(par, &image_observations_sub, 0.0005, 5, Project, + this, alvar::Optimization::TUKEY_LM); + memcpy(rot, &(par->data.db[0 + 0]), 3 * sizeof(double)); + _pose.SetRodriques(&rotm); + + delete opt; + + cvReleaseMat(&par); + cvReleaseMat(&image_observations); + cvReleaseMat(&_M); + + return true; } -bool TrackerOrientation::UpdateRotationOnly(IplImage *gray, IplImage *image) +bool TrackerOrientation::UpdateRotationOnly(IplImage* gray, IplImage* image) { - if(gray->nChannels != 1) return false; - if(!_grsc) _grsc = cvCreateImage(cvSize(_xres, _yres), 8, 1); - if((_xres!=_grsc->width)||(_yres!=_grsc->height)) - cvResize(gray, _grsc); - else - _grsc->imageData = gray->imageData; - - map::iterator it ; - for(it = _F_v.begin(); it != _F_v.end(); it++) - it->second.status2D = Feature::NOT_TRACKED; - - // Track features in image domain (distorted points) - _ft.Track(_grsc); - - // Go through image features and match to previous (_F_v) - for (int i = 0; i < _ft.feature_count; i++) - { - int id = _ft.ids[i]; - _F_v[id].point = _ft.features[i]; - _F_v[id].point.x *= _image_scale; - _F_v[id].point.y *= _image_scale; - _F_v[id].status2D = Feature::IS_TRACKED; - - // Throw outlier away - if(_F_v[id].status3D == Feature::IS_OUTLIER) - { - _ft.DelFeature(i); - } - } - - // Delete features that are not tracked -// map::iterator - it = _F_v.begin(); - while(it != _F_v.end()) - { - if(it->second.status2D == Feature::NOT_TRACKED) - _F_v.erase(it++); - else ++it; - } - - // Update pose based on current information - UpdatePose(); - - it = _F_v.begin(); - while(it != _F_v.end()) - { - Feature *f = &(it->second); - - // Add new points - if(f->status3D == Feature::NONE) - { - double wx, wy, wz; - - CvPoint2D32f fpu = f->point; - _camera->Undistort(fpu); - - // Tassa asetetaan z = inf ja lasketaan x ja y jotenkin?!? - int object_scale = 1; // TODO Same as the pose?!?!?!? - - // inv(K)*[u v 1]'*scale - wx = object_scale*(fpu.x-_camera->calib_K_data[0][2])/_camera->calib_K_data[0][0]; - wy = object_scale*(fpu.y-_camera->calib_K_data[1][2])/_camera->calib_K_data[1][1]; - wz = object_scale; - - // Now the points are in camera coordinate frame. 
-			alvar::Pose p = _pose;
-			p.Invert();
-
-			double Xd[4] = {wx, wy, wz, 1};
-			CvMat Xdm = cvMat(4, 1, CV_64F, Xd);
-			double Pd[16];
-			CvMat Pdm = cvMat(4, 4, CV_64F, Pd);
-			p.GetMatrix(&Pdm);
-			cvMatMul(&Pdm, &Xdm, &Xdm);
-			f->point3d.x = Xd[0]/Xd[3];
-			f->point3d.y = Xd[1]/Xd[3];
-			f->point3d.z = Xd[2]/Xd[3];
-			//cout<<f->point3d.z<<endl;
-
-			f->status3D = Feature::USE_FOR_POSE;
-		}
-
-		if(image)
-		{
-			if(f->status3D == Feature::NONE)
-				cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(255,0,0), 1);
-			else if(f->status3D == Feature::USE_FOR_POSE)
-				cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(0,255,0), 1);
-			else if(f->status3D == Feature::IS_INITIAL)
-				cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3, CV_RGB(0,0,255), 1);
-			else if(f->status3D == Feature::IS_OUTLIER)
-				cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 2, CV_RGB(255,0,255), 1);
-		}
-
-		// Delete points that bk error is too big
-		// OK here just change state...
-		// NYT TEHAAN TURHAAN ASKEN MUKAAN OTETUILLE..
-		if( f->status3D == Feature::USE_FOR_POSE ||
-			f->status3D == Feature::IS_INITIAL )
-		{
-			double p3d[3] = {f->point3d.x, f->point3d.y, f->point3d.z};
-			CvMat p3dm = cvMat(1, 1, CV_64FC3, p3d);
-			double p2d[2];
-			CvMat p2dm = cvMat(2, 1, CV_64F, p2d);
-			cvReshape(&p2dm, &p2dm, 2, 1);
-
-			double gl_mat[16];
-			_pose.GetMatrixGL(gl_mat);
-			_camera->ProjectPoints(&p3dm, gl_mat, &p2dm);
-
-			if(image)
-				cvLine(image, cvPoint(int(p2d[0]), int(p2d[1])), cvPoint(int(f->point.x),int(f->point.y)), CV_RGB(255,0,255));
-
-			double dist = (p2d[0]-f->point.x)*(p2d[0]-f->point.x)+(p2d[1]-f->point.y)*(p2d[1]-f->point.y);
-			if(dist > _outlier_limit)
-				f->status3D = Feature::IS_OUTLIER;
-		}
-
-		it++;
-	}
-	return true;
+  if (gray->nChannels != 1)
+    return false;
+  if (!_grsc)
+    _grsc = cvCreateImage(cvSize(_xres, _yres), 8, 1);
+  if ((_xres != _grsc->width) || (_yres != _grsc->height))
+    cvResize(gray, _grsc);
+  else
+    _grsc->imageData = gray->imageData;
+
+  map<int, Feature>::iterator it;
+  for (it = _F_v.begin(); it != _F_v.end(); it++)
+    it->second.status2D = Feature::NOT_TRACKED;
+
+  // Track features in image domain (distorted points)
+  _ft.Track(_grsc);
+
+  // Go through image features and match to previous (_F_v)
+  for (int i = 0; i < _ft.feature_count; i++)
+  {
+    int id = _ft.ids[i];
+    _F_v[id].point = _ft.features[i];
+    _F_v[id].point.x *= _image_scale;
+    _F_v[id].point.y *= _image_scale;
+    _F_v[id].status2D = Feature::IS_TRACKED;
+
+    // Throw outlier away
+    if (_F_v[id].status3D == Feature::IS_OUTLIER)
+    {
+      _ft.DelFeature(i);
+    }
+  }
+
+  // Delete features that are not tracked
+  // map<int, Feature>::iterator
+  it = _F_v.begin();
+  while (it != _F_v.end())
+  {
+    if (it->second.status2D == Feature::NOT_TRACKED)
+      _F_v.erase(it++);
+    else
+      ++it;
+  }
+
+  // Update pose based on current information
+  UpdatePose();
+
+  it = _F_v.begin();
+  while (it != _F_v.end())
+  {
+    Feature* f = &(it->second);
+
+    // Add new points
+    if (f->status3D == Feature::NONE)
+    {
+      double wx, wy, wz;
+
+      CvPoint2D32f fpu = f->point;
+      _camera->Undistort(fpu);
+
+      // Here z is set to inf and x and y are computed from it somehow?!?
+      int object_scale = 1;  // TODO Same as the pose?!?!?!?
+
+      // inv(K)*[u v 1]'*scale
+      wx = object_scale * (fpu.x - _camera->calib_K_data[0][2]) /
+           _camera->calib_K_data[0][0];
+      wy = object_scale * (fpu.y - _camera->calib_K_data[1][2]) /
+           _camera->calib_K_data[1][1];
+      wz = object_scale;
+
+      // Now the points are in camera coordinate frame.
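+      // A minimal sketch of the back-projection used above, assuming a
+      // plain pinhole model with fx, fy, cx, cy taken from calib_K_data:
+      //   wx = s * (u - cx) / fx;  // X_cam = inv(K) * [u v 1]^T * s
+      //   wy = s * (v - cy) / fy;
+      //   wz = s;                  // s = object_scale, an arbitrary depth
+      // Below, the homogeneous point [wx wy wz 1]^T is lifted into the
+      // world frame with X_world = P^-1 * [wx wy wz 1]^T (P = camera pose).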
+      alvar::Pose p = _pose;
+      p.Invert();
+
+      double Xd[4] = { wx, wy, wz, 1 };
+      CvMat Xdm = cvMat(4, 1, CV_64F, Xd);
+      double Pd[16];
+      CvMat Pdm = cvMat(4, 4, CV_64F, Pd);
+      p.GetMatrix(&Pdm);
+      cvMatMul(&Pdm, &Xdm, &Xdm);
+      f->point3d.x = Xd[0] / Xd[3];
+      f->point3d.y = Xd[1] / Xd[3];
+      f->point3d.z = Xd[2] / Xd[3];
+      // cout << f->point3d.z << endl;
+
+      f->status3D = Feature::USE_FOR_POSE;
+    }
+
+    if (image)
+    {
+      if (f->status3D == Feature::NONE)
+        cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3,
+                 CV_RGB(255, 0, 0), 1);
+      else if (f->status3D == Feature::USE_FOR_POSE)
+        cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3,
+                 CV_RGB(0, 255, 0), 1);
+      else if (f->status3D == Feature::IS_INITIAL)
+        cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 3,
+                 CV_RGB(0, 0, 255), 1);
+      else if (f->status3D == Feature::IS_OUTLIER)
+        cvCircle(image, cvPoint(int(f->point.x), int(f->point.y)), 2,
+                 CV_RGB(255, 0, 255), 1);
+    }
+
+    // Delete points whose back-projection error is too big
+    // OK here just change state...
+    // NOW THIS IS DONE NEEDLESSLY EVEN FOR POINTS JUST ADDED..
+    if (f->status3D == Feature::USE_FOR_POSE ||
+        f->status3D == Feature::IS_INITIAL)
+    {
+      double p3d[3] = { f->point3d.x, f->point3d.y, f->point3d.z };
+      CvMat p3dm = cvMat(1, 1, CV_64FC3, p3d);
+      double p2d[2];
+      CvMat p2dm = cvMat(2, 1, CV_64F, p2d);
+      cvReshape(&p2dm, &p2dm, 2, 1);
+
+      double gl_mat[16];
+      _pose.GetMatrixGL(gl_mat);
+      _camera->ProjectPoints(&p3dm, gl_mat, &p2dm);
+
+      if (image)
+        cvLine(image, cvPoint(int(p2d[0]), int(p2d[1])),
+               cvPoint(int(f->point.x), int(f->point.y)), CV_RGB(255, 0, 255));
+
+      double dist = (p2d[0] - f->point.x) * (p2d[0] - f->point.x) +
+                    (p2d[1] - f->point.y) * (p2d[1] - f->point.y);
+      if (dist > _outlier_limit)
+        f->status3D = Feature::IS_OUTLIER;
+    }
+
+    it++;
+  }
+  return true;
}

-} // namespace alvar
+}  // namespace alvar
diff --git a/ar_track_alvar/src/TrackerPsa.cpp b/ar_track_alvar/src/TrackerPsa.cpp
index 831d404..c43ea86 100644
--- a/ar_track_alvar/src/TrackerPsa.cpp
+++ b/ar_track_alvar/src/TrackerPsa.cpp
@@ -25,213 +25,269 @@ using namespace std;

-namespace alvar {
+namespace alvar
+{
using namespace std;

-TrackerPsa::TrackerPsa(int _max_shift) {
-	max_shift = _max_shift;
-	x_res = 0; y_res = 0;
-	hor = 0; horprev = 0;
-	ver = 0; verprev = 0;
-	framecount = 0;
+TrackerPsa::TrackerPsa(int _max_shift)
+{
+  max_shift = _max_shift;
+  x_res = 0;
+  y_res = 0;
+  hor = 0;
+  horprev = 0;
+  ver = 0;
+  verprev = 0;
+  framecount = 0;
}

-TrackerPsa::~TrackerPsa() {
-	if (hor) delete [] hor;
-	if (horprev) delete [] horprev;
-	if (ver) delete [] ver;
-	if (verprev) delete [] verprev;
+TrackerPsa::~TrackerPsa()
+{
+  if (hor)
+    delete[] hor;
+  if (horprev)
+    delete[] horprev;
+  if (ver)
+    delete[] ver;
+  if (verprev)
+    delete[] verprev;
}

-double TrackerPsa::Track(IplImage *img) {
-	long best_x_factor=99999999;
-	long best_y_factor=99999999;
-	long worst_x_factor=0;
-	long worst_y_factor=0;
-	double quality=0;
-
-	//std::cout<<"Moi1"<<endl;
-	if ((x_res != img->width) || (y_res != img->height)) {
-		x_res = img->width;
-		y_res = img->height;
-		if (hor) delete [] hor;
-		if (horprev) delete [] horprev;
-		if (ver) delete [] ver;
-		if (verprev) delete [] verprev;
-		hor = new long [x_res];
-		horprev = new long [x_res];
-		ver = new long [y_res];
-		verprev = new long [y_res];
-		framecount=0;
-	}
-	framecount++;
-
-	// Make shift tables
-	memset(hor, 0, sizeof(long)*x_res);
-	memset(ver, 0, sizeof(long)*y_res);
-	for (int y=0; y<y_res; y++) {
-		for (int x=0; x<x_res; x++) {
-			unsigned char c = (unsigned char)cvGet2D(img, y, x).val[0];
-			hor[x] += c;
-			ver[y] += c;
-		}
-	}
-
-	// If this is first frame -- no motion
-	if (framecount == 1) {
-		xd = 0; yd = 0;
-	}
-
-	// Otherwais detect for motion
-	else {
-		// Sequence for s: 0, 1, -1, 2, -2, ... -(max_shift-1), (max_shift-1)
-		for (int s=0; s<max_shift; (s>0 ? s=-s : s=-s+1) ) {
-			long factor=0;
-			long factorfirst=0;
-			int count=0;
-			for (int x=0; x<x_res; x++) {
-				int x2 = x+s;
-				if ((x2 > 0) && (x2 < x_res)) {
-					factor += labs(hor[x2]-horprev[x]);
-					count++;
-				}
-			}
-			factor /= count;
-			if (factor < best_x_factor) {
-				best_x_factor=factor;
-				xd=s;
-			}
-			if (factor > worst_x_factor) worst_x_factor=factor;
-		}
-		for (int s=0; s<max_shift; (s>0 ? s=-s : s=-s+1)) {
-			long factor=0;
-			int count=0;
-			for (int y=0; y<y_res; y++) {
-				int y2 = y+s;
-				if ((y2 > 0) && (y2 < y_res)) {
-					factor += labs(ver[y2]-verprev[y]);
-					count++;
-				}
-			}
-			factor /= count;
-			if (factor < best_y_factor) {
-				best_y_factor=factor;
-				yd=s;
-			}
-			if (factor > worst_y_factor) worst_y_factor=factor;
-		}
-		// Figure out the quality?
-		// We assume the result is poor if the
-		// worst factor is very near the best factor
-		int qual_x = (worst_x_factor - best_x_factor)/y_res;
-		int qual_y = (worst_y_factor - best_y_factor)/x_res;
-		quality = std::min(qual_x, qual_y);
-	}
-
-	// Swap memories
-	long *tmp;
-	tmp=hor; hor=horprev; horprev=tmp;
-	tmp=ver; ver=verprev; verprev=tmp;
-
-	// Return confidence (bigger is better)
-	// Smaller than 10 is poor, and you almost certainly have
-	// problems with quality values less than 5.
-	//printf("%d\n", quality);
-	return quality;
+double TrackerPsa::Track(IplImage* img)
+{
+  long best_x_factor = 99999999;
+  long best_y_factor = 99999999;
+  long worst_x_factor = 0;
+  long worst_y_factor = 0;
+  double quality = 0;
+
+  // std::cout<<"Moi1"<<endl;
+  if ((x_res != img->width) || (y_res != img->height))
+  {
+    x_res = img->width;
+    y_res = img->height;
+    if (hor)
+      delete[] hor;
+    if (horprev)
+      delete[] horprev;
+    if (ver)
+      delete[] ver;
+    if (verprev)
+      delete[] verprev;
+    hor = new long[x_res];
+    horprev = new long[x_res];
+    ver = new long[y_res];
+    verprev = new long[y_res];
+    framecount = 0;
+  }
+  framecount++;
+
+  // Make shift tables
+  memset(hor, 0, sizeof(long) * x_res);
+  memset(ver, 0, sizeof(long) * y_res);
+  for (int y = 0; y < y_res; y++)
+  {
+    for (int x = 0; x < x_res; x++)
+    {
+      unsigned char c = (unsigned char)cvGet2D(img, y, x).val[0];
+      hor[x] += c;
+      ver[y] += c;
+    }
+  }
+
+  // If this is first frame -- no motion
+  if (framecount == 1)
+  {
+    xd = 0;
+    yd = 0;
+  }
+
+  // Otherwise detect motion
+  else
+  {
+    // Sequence for s: 0, 1, -1, 2, -2, ... -(max_shift-1), (max_shift-1)
+    for (int s = 0; s < max_shift; (s > 0 ? s = -s : s = -s + 1))
+    {
+      long factor = 0;
+      long factorfirst = 0;
+      int count = 0;
+      for (int x = 0; x < x_res; x++)
+      {
+        int x2 = x + s;
+        if ((x2 > 0) && (x2 < x_res))
+        {
+          factor += labs(hor[x2] - horprev[x]);
+          count++;
+        }
+      }
+      factor /= count;
+      if (factor < best_x_factor)
+      {
+        best_x_factor = factor;
+        xd = s;
+      }
+      if (factor > worst_x_factor)
+        worst_x_factor = factor;
+    }
+    for (int s = 0; s < max_shift; (s > 0 ? s = -s : s = -s + 1))
+    {
+      long factor = 0;
+      int count = 0;
+      for (int y = 0; y < y_res; y++)
+      {
+        int y2 = y + s;
+        if ((y2 > 0) && (y2 < y_res))
+        {
+          factor += labs(ver[y2] - verprev[y]);
+          count++;
+        }
+      }
+      factor /= count;
+      if (factor < best_y_factor)
+      {
+        best_y_factor = factor;
+        yd = s;
+      }
+      if (factor > worst_y_factor)
+        worst_y_factor = factor;
+    }
+    // Figure out the quality?
+    // We assume the result is poor if the
+    // worst factor is very near the best factor
+    int qual_x = (worst_x_factor - best_x_factor) / y_res;
+    int qual_y = (worst_y_factor - best_y_factor) / x_res;
+    quality = std::min(qual_x, qual_y);
+  }
+
+  // Swap memories
+  long* tmp;
+  tmp = hor;
+  hor = horprev;
+  horprev = tmp;
+  tmp = ver;
+  ver = verprev;
+  verprev = tmp;
+
+  // Return confidence (bigger is better)
+  // Smaller than 10 is poor, and you almost certainly have
+  // problems with quality values less than 5.
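+  // A commented usage sketch (hypothetical caller, not part of this file):
+  //   TrackerPsa psa;                  // motion tracker, default max_shift
+  //   double conf = psa.Track(frame);  // estimate the global (xd, yd) shift
+  //   if (conf >= 10) psa.Compensate(&x, &y);  // shift a tracked point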
+ // printf("%d\n", quality); + return quality; } -void TrackerPsa::Compensate(double *x, double *y) { - *x += xd; *y += yd; +void TrackerPsa::Compensate(double* x, double* y) +{ + *x += xd; + *y += yd; } -TrackerPsaRot::TrackerPsaRot(int _max_shift) : TrackerPsa(_max_shift) { - rotd = 0; - rot=new double [360]; - rotprev=new double[360]; - rot_count=new int[360]; +TrackerPsaRot::TrackerPsaRot(int _max_shift) : TrackerPsa(_max_shift) +{ + rotd = 0; + rot = new double[360]; + rotprev = new double[360]; + rot_count = new int[360]; } -TrackerPsaRot::~TrackerPsaRot() { - if (rot) delete [] rot; - if (rotprev) delete [] rotprev; - if (rot_count) delete [] rot_count; +TrackerPsaRot::~TrackerPsaRot() +{ + if (rot) + delete[] rot; + if (rotprev) + delete[] rotprev; + if (rot_count) + delete[] rot_count; } -double TrackerPsaRot::Track(IplImage *img) { - long best_rot_factor=99999999; - double conf1 = TrackerPsa::Track(img); - - if (framecount == 1) { - rotd=0; - } - else { - memset(rot, 0, sizeof(double)*360); - memset(rot_count, 0, sizeof(int)*360); - for (int y=0; y= 0) && (y < img->height) && - (x >= 0) && (x < img->width)) - { - rot[theta] += (unsigned char)cvGet2D(img, y, x).val[0]; - rot_count[theta] ++; - } - } - } - for (int i=0; i<360; i++) { - rot[i] /= rot_count[i]; - } - for (int s=0; s<45; (s>0 ? s=-s : s=-s+1)) { - long factor=0; - for (int theta=0; theta<360; theta++) { - int theta2 = theta+s; - if (theta2 < 0) theta2+=360; - if (theta2 >= 360) theta2-=360; - factor += (long)labs(long(rot[theta2] - rotprev[theta])); - } - if (factor < best_rot_factor) { - best_rot_factor=factor; - rotd=s; - } - } - } - // Remember rotation based on center - memset(rotprev, 0, sizeof(double)*360); - memset(rot_count, 0, sizeof(int)*360); - for (int y=0; y= 0) && (y < img->height) && - (x >= 0) && (x < img->width)) - { - rotprev[theta] += (unsigned char)cvGet2D(img, y, x).val[0]; - rot_count[theta] ++; - } - } - } - for (int i=0; i<360; i++) { - rotprev[i] /= rot_count[i]; - } - return conf1 + best_rot_factor; +double TrackerPsaRot::Track(IplImage* img) +{ + long best_rot_factor = 99999999; + double conf1 = TrackerPsa::Track(img); + + if (framecount == 1) + { + rotd = 0; + } + else + { + memset(rot, 0, sizeof(double) * 360); + memset(rot_count, 0, sizeof(int) * 360); + for (int y = 0; y < y_res; y++) + { + for (int x = 0; x < x_res; x++) + { + double y2 = y - (y_res / 2) - (yd); + double x2 = x - (x_res / 2) - (xd); + double r = sqrt((double)y2 * y2 + x2 * x2); + int theta = int(atan2((double)y2, (double)x2) * 180 / 3.14159265); + if (theta < 0) + theta += 360; + if ((y >= 0) && (y < img->height) && (x >= 0) && (x < img->width)) + { + rot[theta] += (unsigned char)cvGet2D(img, y, x).val[0]; + rot_count[theta]++; + } + } + } + for (int i = 0; i < 360; i++) + { + rot[i] /= rot_count[i]; + } + for (int s = 0; s < 45; (s > 0 ? 
s = -s : s = -s + 1)) + { + long factor = 0; + for (int theta = 0; theta < 360; theta++) + { + int theta2 = theta + s; + if (theta2 < 0) + theta2 += 360; + if (theta2 >= 360) + theta2 -= 360; + factor += (long)labs(long(rot[theta2] - rotprev[theta])); + } + if (factor < best_rot_factor) + { + best_rot_factor = factor; + rotd = s; + } + } + } + // Remember rotation based on center + memset(rotprev, 0, sizeof(double) * 360); + memset(rot_count, 0, sizeof(int) * 360); + for (int y = 0; y < y_res; y++) + { + for (int x = 0; x < x_res; x++) + { + double y2 = y - (y_res / 2); + double x2 = x - (x_res / 2); + double r = sqrt((double)y2 * y2 + x2 * x2); + int theta = int(atan2((double)y2, (double)x2) * 180 / 3.14159265); + if (theta < 0) + theta += 360; + if ((y >= 0) && (y < img->height) && (x >= 0) && (x < img->width)) + { + rotprev[theta] += (unsigned char)cvGet2D(img, y, x).val[0]; + rot_count[theta]++; + } + } + } + for (int i = 0; i < 360; i++) + { + rotprev[i] /= rot_count[i]; + } + return conf1 + best_rot_factor; } -void TrackerPsaRot::Compensate(double *x, double *y) +void TrackerPsaRot::Compensate(double* x, double* y) { - double xx = *x - (x_res/2); - double yy = *y - (y_res/2); - double kosini = cos(rotd*3.1415926535/180); - double sini = sin(rotd*3.1415926535/180); - *x = ((kosini * xx) - (sini * yy)) + xd + (x_res/2); - *y = ((sini * xx) + (kosini * yy)) + yd + (y_res/2); + double xx = *x - (x_res / 2); + double yy = *y - (y_res / 2); + double kosini = cos(rotd * 3.1415926535 / 180); + double sini = sin(rotd * 3.1415926535 / 180); + *x = ((kosini * xx) - (sini * yy)) + xd + (x_res / 2); + *y = ((sini * xx) + (kosini * yy)) + yd + (y_res / 2); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/TrackerStat.cpp b/ar_track_alvar/src/TrackerStat.cpp index b5e3245..8525c0d 100644 --- a/ar_track_alvar/src/TrackerStat.cpp +++ b/ar_track_alvar/src/TrackerStat.cpp @@ -25,90 +25,114 @@ using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; -TrackerStat::TrackerStat(int binsize) : f(100,90) { - hist.AddDimension(binsize); // x - hist.AddDimension(binsize); // y +TrackerStat::TrackerStat(int binsize) : f(100, 90) +{ + hist.AddDimension(binsize); // x + hist.AddDimension(binsize); // y } -void TrackerStat::Reset() { - f.Reset(); +void TrackerStat::Reset() +{ + f.Reset(); } -double TrackerStat::Track(IplImage *img) +double TrackerStat::Track(IplImage* img) { - if (img == NULL) return -1; - f.Track(img); - hist.Clear(); - for (int p=0; pwidth; - y_res = img->height; - hist_rot.Clear(); - for (int p=0; pwidth; + y_res = img->height; + hist_rot.Clear(); + for (int p = 0; p < f.prev_feature_count; p++) + { + for (int c = 0; c < f.feature_count; c++) + { + if (f.prev_ids[p] != f.ids[c]) + continue; + double x_pred = f.prev_features[p].x + xd; + double y_pred = f.prev_features[p].y + yd; + double x_curr = f.features[c].x; + double y_curr = f.features[c].y; + double x = x_curr - x_pred; + double y = y_curr - y_pred; + double theta_pred = + atan2((double)y_pred - (y_res / 2), (double)x_pred - (x_res / 2)) * + 180.0 / 3.1415926535; + double theta_curr = + atan2((double)y_curr - (y_res / 2), (double)x_curr - (x_res / 2)) * + 180.0 / 3.1415926535; + hist_rot.Inc(theta_curr - theta_pred); + } + } + rotd = 0; + hist_rot.GetMax(&rotd); + return ret; } -void TrackerStatRot::Compensate(double *x, double *y) +void TrackerStatRot::Compensate(double* x, double* y) { - double xx = *x - (x_res/2); - double yy = *y - (y_res/2); - double kosini = 
cos(rotd*3.1415926535/180); - double sini = sin(rotd*3.1415926535/180); - *x = ((kosini * xx) - (sini * yy)) + xd + (x_res/2); - *y = ((sini * xx) + (kosini * yy)) + yd + (y_res/2); + double xx = *x - (x_res / 2); + double yy = *y - (y_res / 2); + double kosini = cos(rotd * 3.1415926535 / 180); + double sini = sin(rotd * 3.1415926535 / 180); + *x = ((kosini * xx) - (sini * yy)) + xd + (x_res / 2); + *y = ((sini * xx) + (kosini * yy)) + yd + (y_res / 2); } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/TrifocalTensor.cpp b/ar_track_alvar/src/TrifocalTensor.cpp index 0deb495..0ffc995 100644 --- a/ar_track_alvar/src/TrifocalTensor.cpp +++ b/ar_track_alvar/src/TrifocalTensor.cpp @@ -23,181 +23,148 @@ #include "TrifocalTensor.h" -namespace alvar { - -TrifocalTensor::TrifocalTensor() { +namespace alvar +{ +TrifocalTensor::TrifocalTensor() +{ } -TrifocalTensor::TrifocalTensor(const Pose &p0, const Pose &p1) { +TrifocalTensor::TrifocalTensor(const Pose& p0, const Pose& p1) +{ computeTensor(p0, p1); } -TrifocalTensor::TrifocalTensor(const Pose &p0, const Pose &p1, const Pose &p2) { +TrifocalTensor::TrifocalTensor(const Pose& p0, const Pose& p1, const Pose& p2) +{ computeTensor(p0, p1, p2); } -TrifocalTensor::~TrifocalTensor() { +TrifocalTensor::~TrifocalTensor() +{ } -void TrifocalTensor::computeTensor(const Pose &p0, const Pose &p1) { +void TrifocalTensor::computeTensor(const Pose& p0, const Pose& p1) +{ double data_p12[4][4], data_p13[4][4]; - CvMat p12 = cvMat( 4, 4, CV_64F, data_p12 ); - CvMat p13 = cvMat( 4, 4, CV_64F, data_p13 ); + CvMat p12 = cvMat(4, 4, CV_64F, data_p12); + CvMat p13 = cvMat(4, 4, CV_64F, data_p13); p0.GetMatrix(&p12); p1.GetMatrix(&p13); for (int i = 0; i < 3; i++) for (int j = 0; j < 3; j++) - for (int k = 0; k < 3; k++) { - - T[i][j][k] = - (data_p12[j][i] * data_p13[k][3]) - - - (data_p12[j][3] * data_p13[k][i]); - } + for (int k = 0; k < 3; k++) + { + T[i][j][k] = (data_p12[j][i] * data_p13[k][3]) - + (data_p12[j][3] * data_p13[k][i]); + } } -double *getRow(double* m, int row) { - return &m[4*row]; +double* getRow(double* m, int row) +{ + return &m[4 * row]; } -double det(double *r0, double *r1, double *r2, double *r3) { +double det(double* r0, double* r1, double* r2, double* r3) +{ double m[16]; - memcpy(&m[0], r0, 4*sizeof(double)); - memcpy(&m[4], r1, 4*sizeof(double)); - memcpy(&m[8], r2, 4*sizeof(double)); - memcpy(&m[12], r3, 4*sizeof(double)); + memcpy(&m[0], r0, 4 * sizeof(double)); + memcpy(&m[4], r1, 4 * sizeof(double)); + memcpy(&m[8], r2, 4 * sizeof(double)); + memcpy(&m[12], r3, 4 * sizeof(double)); CvMat M = cvMat(4, 4, CV_64F, m); return cvDet(&M); } -void TrifocalTensor::computeTensor(const Pose &p0, const Pose &p1, const Pose &p2) { +void TrifocalTensor::computeTensor(const Pose& p0, const Pose& p1, + const Pose& p2) +{ double data_p0[16], data_p1[16], data_p2[16]; - CvMat P0 = cvMat( 4, 4, CV_64F, data_p0 ); - CvMat P1 = cvMat( 4, 4, CV_64F, data_p1 ); - CvMat P2 = cvMat( 4, 4, CV_64F, data_p2 ); + CvMat P0 = cvMat(4, 4, CV_64F, data_p0); + CvMat P1 = cvMat(4, 4, CV_64F, data_p1); + CvMat P2 = cvMat(4, 4, CV_64F, data_p2); p0.GetMatrix(&P0); p1.GetMatrix(&P1); p2.GetMatrix(&P2); for (int i = 0; i < 3; i++) for (int j = 0; j < 3; j++) - for (int k = 0; k < 3; k++) { - double sign = i==1 ? -1 : 1; - T[i][j][k] = sign * det(getRow(data_p0, (i+1)%3), - getRow(data_p0, (i+2)%3), - getRow(data_p1, j), - getRow(data_p2, k)); - } + for (int k = 0; k < 3; k++) + { + double sign = i == 1 ? 
-1 : 1; + T[i][j][k] = sign * det(getRow(data_p0, (i + 1) % 3), + getRow(data_p0, (i + 2) % 3), + getRow(data_p1, j), getRow(data_p2, k)); + } } -double TrifocalTensor::projectAxis(const CvPoint2D64f &p0, const CvPoint2D64f &p1, int l) { - double v00 = - p1.x * ( p0.x * T[0][0][l] - + p0.y * T[1][0][l] - + T[2][0][l]) - - - p1.x * ( p0.x * T[0][0][l] - + p0.y * T[1][0][l] - + T[2][0][l]); - double v01 = - p1.x * ( p0.x * T[0][1][l] - + p0.y * T[1][1][l] - + T[2][1][l]) - - - p1.y * ( p0.x * T[0][0][l] - + p0.y * T[1][0][l] - + T[2][0][l]); - double v02 = - p1.x * ( p0.x * T[0][2][l] - + p0.y * T[1][2][l] - + T[2][2][l]) - - ( p0.x * T[0][0][l] - + p0.y * T[1][0][l] - + T[2][0][l]); - - double v10 = - p1.y * ( p0.x * T[0][0][l] - + p0.y * T[1][0][l] - + T[2][0][l]) - - - p1.x * ( p0.x * T[0][1][l] - + p0.y * T[1][1][l] - + T[2][1][l]); - double v11 = - p1.y * ( p0.x * T[0][1][l] - + p0.y * T[1][1][l] - + T[2][1][l]) - - - p1.y * ( p0.x * T[0][1][l] - + p0.y * T[1][1][l] - + T[2][1][l]); - double v12 = - p1.y * ( p0.x * T[0][2][l] - + p0.y * T[1][2][l] - + T[2][2][l]) - - ( p0.x * T[0][1][l] - + p0.y * T[1][1][l] - + T[2][1][l]); - - double v20 = - ( p0.x * T[0][0][l] - + p0.y * T[1][0][l] - + T[2][0][l]) - - - p1.x * ( p0.x * T[0][2][l] - + p0.y * T[1][2][l] - + T[2][2][l]); - double v21 = - ( p0.x * T[0][1][l] - + p0.y * T[1][1][l] - + T[2][1][l]) - - - p1.y * ( p0.x * T[0][2][l] - + p0.y * T[1][2][l] - + T[2][2][l]); - - double v22 = - ( p0.x * T[0][2][l] - + p0.y * T[1][2][l] - + T[2][2][l]) - - - ( p0.x * T[0][2][l] - + p0.y * T[1][2][l] - + T[2][2][l]); - +double TrifocalTensor::projectAxis(const CvPoint2D64f& p0, + const CvPoint2D64f& p1, int l) +{ + double v00 = p1.x * (p0.x * T[0][0][l] + p0.y * T[1][0][l] + T[2][0][l]) - + p1.x * (p0.x * T[0][0][l] + p0.y * T[1][0][l] + T[2][0][l]); + double v01 = p1.x * (p0.x * T[0][1][l] + p0.y * T[1][1][l] + T[2][1][l]) - + p1.y * (p0.x * T[0][0][l] + p0.y * T[1][0][l] + T[2][0][l]); + double v02 = p1.x * (p0.x * T[0][2][l] + p0.y * T[1][2][l] + T[2][2][l]) - + (p0.x * T[0][0][l] + p0.y * T[1][0][l] + T[2][0][l]); + + double v10 = p1.y * (p0.x * T[0][0][l] + p0.y * T[1][0][l] + T[2][0][l]) - + p1.x * (p0.x * T[0][1][l] + p0.y * T[1][1][l] + T[2][1][l]); + double v11 = p1.y * (p0.x * T[0][1][l] + p0.y * T[1][1][l] + T[2][1][l]) - + p1.y * (p0.x * T[0][1][l] + p0.y * T[1][1][l] + T[2][1][l]); + double v12 = p1.y * (p0.x * T[0][2][l] + p0.y * T[1][2][l] + T[2][2][l]) - + (p0.x * T[0][1][l] + p0.y * T[1][1][l] + T[2][1][l]); + + double v20 = (p0.x * T[0][0][l] + p0.y * T[1][0][l] + T[2][0][l]) - + p1.x * (p0.x * T[0][2][l] + p0.y * T[1][2][l] + T[2][2][l]); + double v21 = (p0.x * T[0][1][l] + p0.y * T[1][1][l] + T[2][1][l]) - + p1.y * (p0.x * T[0][2][l] + p0.y * T[1][2][l] + T[2][2][l]); + + double v22 = (p0.x * T[0][2][l] + p0.y * T[1][2][l] + T[2][2][l]) - + (p0.x * T[0][2][l] + p0.y * T[1][2][l] + T[2][2][l]); + double v = 0; - if (fabs(v00) > fabs(v)) v = v00; - if (fabs(v01) > fabs(v)) v = v01; - if (fabs(v02) > fabs(v)) v = v02; - if (fabs(v10) > fabs(v)) v = v10; - if (fabs(v11) > fabs(v)) v = v11; - if (fabs(v12) > fabs(v)) v = v12; - if (fabs(v20) > fabs(v)) v = v20; - if (fabs(v21) > fabs(v)) v = v21; - if (fabs(v22) > fabs(v)) v = v22; + if (fabs(v00) > fabs(v)) + v = v00; + if (fabs(v01) > fabs(v)) + v = v01; + if (fabs(v02) > fabs(v)) + v = v02; + if (fabs(v10) > fabs(v)) + v = v10; + if (fabs(v11) > fabs(v)) + v = v11; + if (fabs(v12) > fabs(v)) + v = v12; + if (fabs(v20) > fabs(v)) + v = v20; + if (fabs(v21) > fabs(v)) + v = v21; + if 
(fabs(v22) > fabs(v)) + v = v22; return v; } -void TrifocalTensor::project(const CvPoint2D64f &p0, - const CvPoint2D64f &p1, - CvPoint2D64f &p2) { +void TrifocalTensor::project(const CvPoint2D64f& p0, const CvPoint2D64f& p1, + CvPoint2D64f& p2) +{ double z = projectAxis(p0, p1, 2); p2.x = projectAxis(p0, p1, 0) / z; p2.y = projectAxis(p0, p1, 1) / z; } -double TrifocalTensor::projectError(const CvPoint2D64f &p0, - const CvPoint2D64f &p1, - const CvPoint2D64f &p2) { +double TrifocalTensor::projectError(const CvPoint2D64f& p0, + const CvPoint2D64f& p1, + const CvPoint2D64f& p2) +{ double v0 = projectAxis(p0, p1, 0); double v1 = projectAxis(p0, p1, 1); double v2 = projectAxis(p0, p1, 2); - double e0 = v0/v2 - p2.x; - double e1 = v1/v2 - p2.y; - return e0*e0+e1*e1; + double e0 = v0 / v2 - p2.x; + double e1 = v1 / v2 - p2.y; + return e0 * e0 + e1 * e1; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/UnscentedKalman.cpp b/ar_track_alvar/src/UnscentedKalman.cpp index 00f6e59..45b9d69 100644 --- a/ar_track_alvar/src/UnscentedKalman.cpp +++ b/ar_track_alvar/src/UnscentedKalman.cpp @@ -25,50 +25,71 @@ #include #include -namespace alvar { - -UnscentedKalman::UnscentedKalman(int state_n, int obs_n, int state_k, double alpha, double beta) { +namespace alvar +{ +UnscentedKalman::UnscentedKalman(int state_n, int obs_n, int state_k, + double alpha, double beta) +{ state_k = 0; - //TODO: support a separate noise vector/covariance matrix: state_k; + // TODO: support a separate noise vector/covariance matrix: state_k; this->state_k = state_k; this->state_n = state_n; this->obs_n = obs_n; sigma_n = 2 * state_n + 1; double L = state_n + state_k; - lambda = alpha*alpha * L - L; - lambda2 = 1 - alpha*alpha + beta; - - state = cvCreateMat(state_n, 1, CV_64F); cvSetZero(state); - stateCovariance = cvCreateMat(state_n, state_n, CV_64F); cvSetZero(stateCovariance); - sqrtStateCovariance = cvCreateMat(state_n, state_n, CV_64F); cvSetZero(sqrtStateCovariance); - stateD = cvCreateMat(state_n, state_n, CV_64F); cvSetZero(stateD); - stateU = cvCreateMat(state_n, state_n, CV_64F); cvSetZero(stateU); - stateV = cvCreateMat(state_n, state_n, CV_64F); cvSetZero(stateV); - stateTmp = cvCreateMat(state_n, state_n, CV_64F); cvSetZero(stateTmp); - stateDiff = cvCreateMat(state_n, 1, CV_64F); cvSetZero(stateDiff); - - predObs = cvCreateMat(obs_n, 1, CV_64F); cvSetZero(predObs); - predObsCovariance = cvCreateMat(obs_n, obs_n, CV_64F); cvSetZero(predObsCovariance); - predObsDiff = cvCreateMat(obs_n, 1, CV_64F); cvSetZero(predObsDiff); - - invPredObsCovariance = cvCreateMat(obs_n, obs_n, CV_64F); cvSetZero(invPredObsCovariance); - statePredObsCrossCorrelation = cvCreateMat(state_n, obs_n, CV_64F); cvSetZero(statePredObsCrossCorrelation); - kalmanGain = cvCreateMat(state_n, obs_n, CV_64F); cvSetZero(kalmanGain); - kalmanTmp = cvCreateMat(state_n, obs_n, CV_64F); cvSetZero(kalmanTmp); + lambda = alpha * alpha * L - L; + lambda2 = 1 - alpha * alpha + beta; + + state = cvCreateMat(state_n, 1, CV_64F); + cvSetZero(state); + stateCovariance = cvCreateMat(state_n, state_n, CV_64F); + cvSetZero(stateCovariance); + sqrtStateCovariance = cvCreateMat(state_n, state_n, CV_64F); + cvSetZero(sqrtStateCovariance); + stateD = cvCreateMat(state_n, state_n, CV_64F); + cvSetZero(stateD); + stateU = cvCreateMat(state_n, state_n, CV_64F); + cvSetZero(stateU); + stateV = cvCreateMat(state_n, state_n, CV_64F); + cvSetZero(stateV); + stateTmp = cvCreateMat(state_n, state_n, CV_64F); + cvSetZero(stateTmp); + stateDiff = 
cvCreateMat(state_n, 1, CV_64F); + cvSetZero(stateDiff); + + predObs = cvCreateMat(obs_n, 1, CV_64F); + cvSetZero(predObs); + predObsCovariance = cvCreateMat(obs_n, obs_n, CV_64F); + cvSetZero(predObsCovariance); + predObsDiff = cvCreateMat(obs_n, 1, CV_64F); + cvSetZero(predObsDiff); + + invPredObsCovariance = cvCreateMat(obs_n, obs_n, CV_64F); + cvSetZero(invPredObsCovariance); + statePredObsCrossCorrelation = cvCreateMat(state_n, obs_n, CV_64F); + cvSetZero(statePredObsCrossCorrelation); + kalmanGain = cvCreateMat(state_n, obs_n, CV_64F); + cvSetZero(kalmanGain); + kalmanTmp = cvCreateMat(state_n, obs_n, CV_64F); + cvSetZero(kalmanTmp); sigma_state = new CvMat*[sigma_n]; sigma_predObs = new CvMat*[sigma_n]; - for (int i = 0; i < sigma_n; i++) { - sigma_state[i] = cvCreateMat(state_n, 1, CV_64F); cvSetZero(sigma_state[i]); - sigma_predObs[i] = cvCreateMat(obs_n, 1, CV_64F); cvSetZero(sigma_predObs[i]); + for (int i = 0; i < sigma_n; i++) + { + sigma_state[i] = cvCreateMat(state_n, 1, CV_64F); + cvSetZero(sigma_state[i]); + sigma_predObs[i] = cvCreateMat(obs_n, 1, CV_64F); + cvSetZero(sigma_predObs[i]); } sigmasUpdated = false; } -UnscentedKalman::~UnscentedKalman() { +UnscentedKalman::~UnscentedKalman() +{ cvReleaseMat(&state); cvReleaseMat(&stateCovariance); cvReleaseMat(&sqrtStateCovariance); @@ -86,7 +107,8 @@ UnscentedKalman::~UnscentedKalman() { cvReleaseMat(&predObsCovariance); cvReleaseMat(&predObsDiff); - for (int i = 0; i < sigma_n; i++) { + for (int i = 0; i < sigma_n; i++) + { cvReleaseMat(&sigma_state[i]); cvReleaseMat(&sigma_predObs[i]); } @@ -94,58 +116,57 @@ UnscentedKalman::~UnscentedKalman() { delete[] sigma_predObs; } -void UnscentedKalman::initialize() { +void UnscentedKalman::initialize() +{ // Computes new sigma points from current state estimate. // 1. Compute square root of state co-variance: //[E D] = eig(A); sqrtm(A) = E * sqrt(D) * E' where D is a diagonal matrix. - //sqrt(D) is formed by taking the square root of the diagonal entries in D. - #ifdef MYDEBUG - printf("stateCovariance:\n"); - for (int i = 0; i < 5; i++) - printf("%+.10f %+.10f %+.10f %+.10f %+.10f\n", - cvGetReal2D(stateCovariance, 0, i), - cvGetReal2D(stateCovariance, 1, i), - cvGetReal2D(stateCovariance, 2, i), - cvGetReal2D(stateCovariance, 3, i), - cvGetReal2D(stateCovariance, 4, i)); + // sqrt(D) is formed by taking the square root of the diagonal entries in D. 
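+  // A quick numeric sanity check of the idea (not executed here): for a
+  // symmetric positive semi-definite A, the SVD A = U * S * V' has U == V,
+  // so (U * sqrt(S) * V') * (U * sqrt(S) * V') = U * S * V' = A.
+  // E.g. A = [[4, 0], [0, 9]] gives sqrtm(A) = [[2, 0], [0, 3]].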
+#ifdef MYDEBUG + printf("stateCovariance:\n"); + for (int i = 0; i < 5; i++) + printf( + "%+.10f %+.10f %+.10f %+.10f %+.10f\n", + cvGetReal2D(stateCovariance, 0, i), cvGetReal2D(stateCovariance, 1, i), + cvGetReal2D(stateCovariance, 2, i), cvGetReal2D(stateCovariance, 3, i), + cvGetReal2D(stateCovariance, 4, i)); #endif - //Another equivilant way is to use: + // Another equivilant way is to use: // [U S V] = svd(A); sqrtm(A) = U * sqrt(S) * V' - cvSVD(stateCovariance, stateD, stateU, stateV); //, CV_SVD_V_T + cvSVD(stateCovariance, stateD, stateU, stateV); //, CV_SVD_V_T double L = state_n + state_k; double scale = L + lambda; - for (int i = 0; i < state_n; i++) { + for (int i = 0; i < state_n; i++) + { double d = cvGetReal2D(stateD, i, i); - cvSetReal2D(stateD, i, i, sqrt(scale*d)); + cvSetReal2D(stateD, i, i, sqrt(scale * d)); } cvGEMM(stateD, stateV, 1., NULL, 0, stateTmp, CV_GEMM_B_T); cvGEMM(stateU, stateTmp, 1., NULL, 0, sqrtStateCovariance); #ifdef MYDEBUG - printf("sqrtStateCovariance:\n"); - for (int i = 0; i < 5; i++) - printf("%+.10f %+.10f %+.10f %+.10f %+.10f\n", - cvGetReal2D(sqrtStateCovariance, 0, i), - cvGetReal2D(sqrtStateCovariance, 1, i), - cvGetReal2D(sqrtStateCovariance, 2, i), - cvGetReal2D(sqrtStateCovariance, 3, i), - cvGetReal2D(sqrtStateCovariance, 4, i)); - cvGEMM(sqrtStateCovariance, sqrtStateCovariance, 1., NULL, 0, stateTmp); - printf("sqrtStateCovariance^2:\n"); - for (int i = 0; i < 5; i++) - printf("%+.10f %+.10f %+.10f %+.10f %+.10f\n", - cvGetReal2D(stateTmp, 0, i), - cvGetReal2D(stateTmp, 1, i), - cvGetReal2D(stateTmp, 2, i), - cvGetReal2D(stateTmp, 3, i), - cvGetReal2D(stateTmp, 4, i)); + printf("sqrtStateCovariance:\n"); + for (int i = 0; i < 5; i++) + printf("%+.10f %+.10f %+.10f %+.10f %+.10f\n", + cvGetReal2D(sqrtStateCovariance, 0, i), + cvGetReal2D(sqrtStateCovariance, 1, i), + cvGetReal2D(sqrtStateCovariance, 2, i), + cvGetReal2D(sqrtStateCovariance, 3, i), + cvGetReal2D(sqrtStateCovariance, 4, i)); + cvGEMM(sqrtStateCovariance, sqrtStateCovariance, 1., NULL, 0, stateTmp); + printf("sqrtStateCovariance^2:\n"); + for (int i = 0; i < 5; i++) + printf("%+.10f %+.10f %+.10f %+.10f %+.10f\n", cvGetReal2D(stateTmp, 0, i), + cvGetReal2D(stateTmp, 1, i), cvGetReal2D(stateTmp, 2, i), + cvGetReal2D(stateTmp, 3, i), cvGetReal2D(stateTmp, 4, i)); #endif // 2. Form new sigma points. int sigma_i = 0; cvCopy(state, sigma_state[sigma_i++]); - for (int i = 0; i < state_n; i++) { + for (int i = 0; i < state_n; i++) + { CvMat col; cvGetCol(sqrtStateCovariance, &col, i); cvAdd(state, &col, sigma_state[sigma_i++]); @@ -155,59 +176,63 @@ void UnscentedKalman::initialize() { sigmasUpdated = true; } -void UnscentedKalman::predict(UnscentedProcess *process_model) { - if (!sigmasUpdated) initialize(); +void UnscentedKalman::predict(UnscentedProcess* process_model) +{ + if (!sigmasUpdated) + initialize(); // Map sigma points through the process model and compute new state mean. cvSetZero(state); double L = state_n + state_k; double totalWeight = 0; - for (int i = 0; i < sigma_n; i++) { - double weight = i == 0 - ? lambda / (L + lambda) - : .5 / (L + lambda); - totalWeight += weight; + for (int i = 0; i < sigma_n; i++) + { + double weight = i == 0 ? lambda / (L + lambda) : .5 / (L + lambda); + totalWeight += weight; } - for (int i = 0; i < sigma_n; i++) { - CvMat *sigma = sigma_state[i]; + for (int i = 0; i < sigma_n; i++) + { + CvMat* sigma = sigma_state[i]; process_model->f(sigma); - double weight = i == 0 - ? 
lambda / (L + lambda) - : .5 / (L + lambda); - double scale = weight / totalWeight; + double weight = i == 0 ? lambda / (L + lambda) : .5 / (L + lambda); + double scale = weight / totalWeight; cvAddWeighted(sigma, scale, state, 1., 0., state); } // Compute new state co-variance. cvSetZero(stateCovariance); totalWeight = 0; - for (int i = 0; i < sigma_n; i++) { - double weight = i == 0 - ? lambda / (L + lambda) + lambda2 - : .5 / (L + lambda); - totalWeight += weight; + for (int i = 0; i < sigma_n; i++) + { + double weight = + i == 0 ? lambda / (L + lambda) + lambda2 : .5 / (L + lambda); + totalWeight += weight; } - for (int i = 0; i < sigma_n; i++) { - double weight = i == 0 - ? lambda / (L + lambda) + lambda2 - : .5 / (L + lambda); - double scale = weight / totalWeight; + for (int i = 0; i < sigma_n; i++) + { + double weight = + i == 0 ? lambda / (L + lambda) + lambda2 : .5 / (L + lambda); + double scale = weight / totalWeight; cvSub(sigma_state[i], state, stateDiff); cvGEMM(stateDiff, stateDiff, scale, stateCovariance, 1., stateCovariance, - CV_GEMM_B_T); + CV_GEMM_B_T); } // Add any additive noise. - CvMat *noise = process_model->getProcessNoise(); - if (noise) cvAdd(stateCovariance, noise, stateCovariance); + CvMat* noise = process_model->getProcessNoise(); + if (noise) + cvAdd(stateCovariance, noise, stateCovariance); #ifdef MYDEBUG printf("predicted state: "); - for (int i = 0; i < state_n; i++) printf("%f ", cvGetReal1D(state, i)); + for (int i = 0; i < state_n; i++) + printf("%f ", cvGetReal1D(state, i)); printf("\n"); printf("predicted stateCovariance:\n"); - for (int i = 0; i < state_n; i++) { - for (int j = 0; j < state_n; j++) printf("%+f ", cvGetReal2D(stateCovariance, i, j)); + for (int i = 0; i < state_n; i++) + { + for (int j = 0; j < state_n; j++) + printf("%+f ", cvGetReal2D(stateCovariance, i, j)); printf("\n"); } #endif @@ -215,11 +240,14 @@ void UnscentedKalman::predict(UnscentedProcess *process_model) { sigmasUpdated = false; } -void UnscentedKalman::update(UnscentedObservation *obs) { - if (!sigmasUpdated) initialize(); - CvMat *innovation = obs->getObservation(); +void UnscentedKalman::update(UnscentedObservation* obs) +{ + if (!sigmasUpdated) + initialize(); + CvMat* innovation = obs->getObservation(); int obs_n = innovation->rows; - if (obs_n > this->obs_n) { + if (obs_n > this->obs_n) + { printf("Observation exceeds maximum size!\n"); abort(); } @@ -227,55 +255,62 @@ void UnscentedKalman::update(UnscentedObservation *obs) { // Map sigma points through the observation model and compute predicted mean. CvMat predObs = cvMat(obs_n, 1, CV_64F, this->predObs->data.db); cvSetZero(&predObs); - for (int i = 0; i < sigma_n; i++) { + for (int i = 0; i < sigma_n; i++) + { CvMat sigma_h = cvMat(obs_n, 1, CV_64F, sigma_predObs[i]->data.db); - double scale = i == 0 - ? (double)state_k / (double)(state_n + state_k) - : .5 / (double)(state_n + state_k); + double scale = i == 0 ? (double)state_k / (double)(state_n + state_k) : + .5 / (double)(state_n + state_k); obs->h(&sigma_h, sigma_state[i]); cvAddWeighted(&sigma_h, scale, &predObs, 1., 0., &predObs); } // Compute predicted observation co-variance. 
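+  // Reference sketch of the terms accumulated below (matching the code,
+  // not extending it):
+  //   P_zz = sum_i w_i (Z_i - z_mean)(Z_i - z_mean)^T + R   -> predObsCovariance
+  //   P_xz = sum_i w_i (X_i - x_mean)(Z_i - z_mean)^T       -> cross-correlation
+  // later combined as K = P_xz * inv(P_zz), x += K * (z - z_mean),
+  // P -= K * P_zz * K^T.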
- CvMat predObsCovariance = cvMat(obs_n, obs_n, CV_64F, - this->predObsCovariance->data.db); - CvMat statePredObsCrossCorrelation = cvMat(state_n, obs_n, CV_64F, - this->statePredObsCrossCorrelation->data.db); + CvMat predObsCovariance = + cvMat(obs_n, obs_n, CV_64F, this->predObsCovariance->data.db); + CvMat statePredObsCrossCorrelation = cvMat( + state_n, obs_n, CV_64F, this->statePredObsCrossCorrelation->data.db); CvMat predObsDiff = cvMat(obs_n, 1, CV_64F, this->predObsDiff->data.db); cvSetZero(&predObsCovariance); cvSetZero(&statePredObsCrossCorrelation); - for (int i = 0; i < sigma_n; i++) { + for (int i = 0; i < sigma_n; i++) + { CvMat sigma_h = cvMat(obs_n, 1, CV_64F, sigma_predObs[i]->data.db); - double scale = i == 0 - ? (double)state_k / (double)(state_n + state_k) - : .5 / (double)(state_n + state_k); + double scale = i == 0 ? (double)state_k / (double)(state_n + state_k) : + .5 / (double)(state_n + state_k); cvSub(sigma_state[i], state, stateDiff); cvSub(&sigma_h, &predObs, &predObsDiff); - cvGEMM(&predObsDiff, &predObsDiff, scale, &predObsCovariance, 1., &predObsCovariance, - CV_GEMM_B_T); - cvGEMM(stateDiff, &predObsDiff, scale, &statePredObsCrossCorrelation, 1., - &statePredObsCrossCorrelation, CV_GEMM_B_T); + cvGEMM(&predObsDiff, &predObsDiff, scale, &predObsCovariance, 1., + &predObsCovariance, CV_GEMM_B_T); + cvGEMM(stateDiff, &predObsDiff, scale, &statePredObsCrossCorrelation, 1., + &statePredObsCrossCorrelation, CV_GEMM_B_T); } // Add any additive noise. - CvMat *noise = obs->getObservationNoise(); - if (noise) cvAdd(&predObsCovariance, noise, &predObsCovariance); + CvMat* noise = obs->getObservationNoise(); + if (noise) + cvAdd(&predObsCovariance, noise, &predObsCovariance); #ifdef MYDEBUG printf("real observation: "); - for (int i = 0; i < obs_n; i++) printf("%+f ", cvGetReal1D(innovation ,i)); + for (int i = 0; i < obs_n; i++) + printf("%+f ", cvGetReal1D(innovation, i)); printf("\n"); printf("predicted observation: "); - for (int i = 0; i < obs_n; i++) printf("%+f ", cvGetReal1D(&predObs,i)); + for (int i = 0; i < obs_n; i++) + printf("%+f ", cvGetReal1D(&predObs, i)); printf("\n"); printf("predicted observation co-variance\n"); - for (int i = 0; i < obs_n; i++) { - for (int j = 0; j < obs_n; j++) printf("%+f ", cvGetReal2D(&predObsCovariance,i,j)); + for (int i = 0; i < obs_n; i++) + { + for (int j = 0; j < obs_n; j++) + printf("%+f ", cvGetReal2D(&predObsCovariance, i, j)); printf("\n"); } printf("state observation cross-correlation\n"); - for (int i = 0; i < state_n; i++) { - for (int j = 0; j < obs_n; j++) printf("%+f ", cvGetReal2D(&statePredObsCrossCorrelation,i,j)); + for (int i = 0; i < state_n; i++) + { + for (int j = 0; j < obs_n; j++) + printf("%+f ", cvGetReal2D(&statePredObsCrossCorrelation, i, j)); printf("\n"); } #endif @@ -286,20 +321,21 @@ void UnscentedKalman::update(UnscentedObservation *obs) { // state: x = x + _W * v // co-var: P = P - W * (R + Z) * W^T - CvMat invPredObsCovariance = cvMat(obs_n, obs_n, CV_64F, - this->invPredObsCovariance->data.db); + CvMat invPredObsCovariance = + cvMat(obs_n, obs_n, CV_64F, this->invPredObsCovariance->data.db); CvMat kalmanGain = cvMat(state_n, obs_n, CV_64F, this->kalmanGain->data.db); CvMat kalmanTmp = cvMat(state_n, obs_n, CV_64F, this->kalmanTmp->data.db); cvSub(innovation, &predObs, innovation); - //double inno_norm = cvNorm(innovation) / obs_n; - //if (inno_norm > 5.0) { + // double inno_norm = cvNorm(innovation) / obs_n; + // if (inno_norm > 5.0) { // return; //} #ifdef MYDEBUG printf("innovation: "); 
- for (int i = 0; i < obs_n; i++) printf("%f ", cvGetReal1D(innovation,i)); + for (int i = 0; i < obs_n; i++) + printf("%f ", cvGetReal1D(innovation, i)); printf("\n"); double inn_norm = cvNorm(innovation); printf("innivation norm: %f\n", inn_norm); @@ -310,14 +346,15 @@ void UnscentedKalman::update(UnscentedObservation *obs) { cvGEMM(&kalmanGain, innovation, 1., state, 1., state); cvMatMul(&kalmanGain, &predObsCovariance, &kalmanTmp); cvGEMM(&kalmanTmp, &kalmanGain, -1., stateCovariance, 1., stateCovariance, - CV_GEMM_B_T); + CV_GEMM_B_T); #ifdef MYDEBUG printf("estimated state: "); - for (int i = 0; i < state_n; i++) printf("%f ", cvGetReal1D(state, i)); + for (int i = 0; i < state_n; i++) + printf("%f ", cvGetReal1D(state, i)); printf("\n"); #endif sigmasUpdated = false; } -} // namespace alvar +} // namespace alvar diff --git a/ar_track_alvar/src/Util.cpp b/ar_track_alvar/src/Util.cpp index 4734fe1..8398a88 100644 --- a/ar_track_alvar/src/Util.cpp +++ b/ar_track_alvar/src/Util.cpp @@ -23,495 +23,625 @@ #include "ar_track_alvar/Util.h" #include "ar_track_alvar/FileFormatUtils.h" +#include using namespace std; -namespace alvar { +namespace alvar +{ using namespace std; -//ttesis +// ttesis + +int dot(const cv::Point& A, const cv::Point& B, const cv::Point& C) +{ + cv::Point AB, BC; + AB.x = B.x - A.x; + AB.y = B.y - A.y; + BC.x = C.x - B.x; + BC.y = C.y - B.y; + int dot = AB.x * BC.x + AB.y * BC.y; + return dot; +} + +int cross(const cv::Point& A, const cv::Point& B, const cv::Point& C) +{ + cv::Point AB, AC; + AB.x = B.x - A.x; + AB.y = B.y - A.y; + AC.x = C.x - A.x; + AC.y = C.y - A.y; + int cross = AB.x * AC.y - AB.y * AC.x; + return cross; +} + +double distance(const cv::Point& A, const cv::Point& B) +{ + double d1 = A.x - B.x; + double d2 = A.y - B.y; + return sqrt(d1 * d1 + d2 * d2); +} + +double linePointDist(const cv::Point& A, const cv::Point& B, const cv::Point& C, + bool isSegment) +{ + double dist = cross(A, B, C) / distance(A, B); + if (isSegment) + { + int dot1 = dot(A, B, C); + if (dot1 > 0) + return distance(B, C); + int dot2 = dot(B, A, C); + if (dot2 > 0) + return distance(A, C); + } + return abs(dist); +} + +double angle(const cv::Point& A, const cv::Point& B, const cv::Point& C, + const cv::Point& D, int isDirectionDependent) +{ + double angle; + double a = B.x - A.x; + double b = B.y - A.y; + double c = D.x - C.x; + double d = D.y - C.y; + angle = + acos(((a * c) + (b * d)) / (sqrt(a * a + b * b) * sqrt(c * c + d * d))); + if (isDirectionDependent) + { + return angle; + } + else + { + if (angle > CV_PI / 2) + { + return CV_PI - angle; + } + else + return angle; + } +} + +double polyLinePointDist(const std::vector& points, + const cv::Point& C, int* index, int isClosedPolygon) +{ + // Calculate minimum distance of Point C to Polygon whose points are in list + // PointList if isClosedPolygon is true polygon is closed (segnment of the + // first and last point is also checked) index is the index of point A in + // pointlist, where A is the starting point of the closest polygon segment + *index = -1; + double mindist = -1; + double dist; + for (int i = 0; i < points.size() - 1; i++) + { + dist = linePointDist(points[i], points[i + 1], C, 1); + if (mindist == -1 || dist < mindist) + { + mindist = dist; + *index = i; + } + } + if (isClosedPolygon) + { + dist = linePointDist(points[points.size() - 1], points[0], C, 1); + if (dist < mindist) + { + mindist = dist; + *index = points.size() - 1; + } + } + return mindist; +} + +// ttesis + +void FitCVEllipse(const vector& 
points, + cv::RotatedRect& ellipse_box) +{ + if (points.size() < 8) + return; + + cv::Mat vector = cv::Mat(1, int(points.size()), CV_64FC2); + for (size_t i = 0; i < points.size(); ++i) + { + vector.at(0, i) = cv::Vec2d(points[i].x, points[i].y); + } + ellipse_box = cv::fitEllipse(vector); + vector.release(); +} + +int exp_filt2(vector& v, vector& ret, bool clamp) +{ + double y; + int n = (int)v.size(); + + double a = pow(0.01, 8.0 / n); // 0.8; + double k = -log(a); -int dot(CvPoint *A, CvPoint *B, CvPoint *C){ - CvPoint AB, BC; - AB.x = B->x-A->x; - AB.y = B->y-A->y; - BC.x = C->x-B->x; - BC.y = C->y-B->y; - int dot = AB.x * BC.x + AB.y * BC.y; - return dot; + // Forward + vector yp(n); + + y = 0; + for (int i = 0; i < n; ++i) + y = a * y + v[i]; + + y *= 1.0 / (1.0 - pow(a, n)); + + for (int i = 0; i < n; ++i) + { + y = a * y + v[i]; + yp[i] = y; + } + + // Backward + vector ym(n); + + y = 0; + for (int i = n - 1; i >= 0; --i) + y = a * y + v[i]; + + y *= 1.0 / (1.0 - pow(a, n)); + + for (int i = n - 1; i >= 0; --i) + { + y = a * y + v[i]; + ym[i] = y; + } + + // Filter + ret.resize(n); + for (int i = 0; i < n; ++i) + { + ret[i] = (k / 2.0) * (yp[i] + ym[i] - v[i]); + } + + return int(ret.size()); +} + +int find_zero_crossings(const vector& v, vector& corners, int offs) +{ + int ind = 0; + int len = (int)v.size(); + + int state; + if (Sign(v.at(0)) == 1) + state = 1; + else + state = 2; + + corners.clear(); + for (int i = 0; i < len + offs; ++i) + { + if (i < len) + ind = i; + else + ind = i - len; + + int s = Sign(v.at(ind)); + if (state == 1 && s == -1) + state = 2; + if (state == 2 && s == 1) + { + state = 1; + bool test = true; + for (unsigned j = 0; j < corners.size(); ++j) + if (corners.at(j) == ind) + test = false; + + if (test) + corners.push_back(ind); + } + } + + return int(corners.size()); } -int cross(CvPoint *A,CvPoint *B, CvPoint *C){ - CvPoint AB, AC; - AB.x = B->x-A->x; - AB.y = B->y-A->y; - AC.x = C->x-A->x; - AC.y = C->y-A->y; - int cross = AB.x * AC.y - AB.y * AC.x; - return cross; +void out_matrix(const cv::Mat& m, const char* name) +{ + if (m.cols == 1) + { + std::cout << name << " = ["; + for (int j = 0; j < m.rows; j++) + { + std::cout << " " << m.at(j, 0); + } + std::cout << "]^T" << std::endl; + } + else if (m.rows == 1) + { + std::cout << name << " = ["; + for (int i = 0; i < m.cols; i++) + { + std::cout << " " << m.at(0, i); + } + std::cout << "]^T" << std::endl; + } + else + { + std::cout << name << " = [" << std::endl; + for (int j = 0; j < m.rows; j++) + { + for (int i = 0; i < m.cols; i++) + { + std::cout << " " << m.at(j, i); + } + std::cout << std::endl; + } + std::cout << "]" << std::endl; + } +} + +double Limit(double val, double min_val, double max_val) +{ + return max(min_val, min(max_val, val)); } -double distance(CvPoint *A,CvPoint *B){ - double d1 = A->x - B->x; - double d2 = A->y - B->y; - return sqrt(d1*d1+d2*d2); +Index::Index(int a) +{ + val.push_back(a); } +Index::Index(int a, int b) +{ + val.push_back(a); + val.push_back(b); +} -double linePointDist(CvPoint *A,CvPoint *B,CvPoint *C, bool isSegment){ - double dist = cross(A,B,C) / distance(A,B); - if(isSegment){ - int dot1 = dot(A,B,C); - if(dot1 > 0)return distance(B,C); - int dot2 = dot(B,A,C); - if(dot2 > 0)return distance(A,C); +Index::Index(int a, int b, int c) +{ + val.push_back(a); + val.push_back(b); + val.push_back(c); +} + +bool Index::operator<(const Index& index) const +{ + int comp = 0; + size_t d = 0; + // Go through the dimensions (last being the most significant) + while ((d < 
val.size()) || (d < index.val.size())) + { + int v0 = (d < val.size() ? val[d] : 0); + int v1 = (d < index.val.size() ? index.val[d] : 0); + if (v0 < v1) + comp = -1; + else if (v1 < v0) + comp = 1; + d++; + } + if (comp == -1) + return true; + return false; +} + +int Histogram::DimIndex(int dim, double val) +{ + int binsize = (dim < int(dim_binsize.size()) ? dim_binsize[dim] : 1); + if (val >= 0) + return int(val + (binsize / 2)) / binsize; + return int(val - (binsize / 2)) / binsize; +} +double Histogram::DimVal(int dim, int index) +{ + int binsize = (dim < int(dim_binsize.size()) ? dim_binsize[dim] : 1); + return (index * binsize); +} +void Histogram::AddDimension(int binsize) +{ + dim_binsize.push_back(binsize); +} +void Histogram::Clear() +{ + bins.clear(); +} +void Histogram::Inc(double dim0, double dim1, double dim2) +{ + Index index(DimIndex(0, dim0), DimIndex(1, dim1), DimIndex(2, dim2)); + if (bins.find(index) != bins.end()) + { + bins[index]++; + } + else + { + bins[index] = 1; + } +} +int Histogram::GetMax(double* dim0, double* dim1, double* dim2) +{ + map::const_iterator iter, max_iter; + int max = 0; + for (max_iter = iter = bins.begin(); iter != bins.end(); iter++) + { + if (iter->second > max) + { + max = iter->second; + max_iter = iter; + } + } + if (max > 0) + { + *dim0 = DimVal(0, max_iter->first.val[0]); + if (dim1) + *dim1 = DimVal(1, max_iter->first.val[1]); + if (dim2) + *dim2 = DimVal(2, max_iter->first.val[2]); + } + return max; +} +void HistogramSubpixel::Clear() +{ + bins.clear(); + acc_dim0.clear(); + acc_dim1.clear(); + acc_dim2.clear(); +} +void HistogramSubpixel::Inc(double dim0, double dim1, double dim2) +{ + Index index(DimIndex(0, dim0), DimIndex(1, dim1), DimIndex(2, dim2)); + if (bins.find(index) != bins.end()) + { + bins[index]++; + acc_dim0[index] += dim0; + acc_dim1[index] += dim1; + acc_dim2[index] += dim2; + } + else + { + bins[index] = 1; + acc_dim0[index] = dim0; + acc_dim1[index] = dim1; + acc_dim2[index] = dim2; + } +} +int HistogramSubpixel::GetMax(double* dim0, double* dim1, double* dim2) +{ + map::const_iterator iter; + int max = 0; + int divider = 0; + for (iter = bins.begin(); iter != bins.end(); iter++) + { + if (iter->second > max) + { + divider = max = iter->second; + *dim0 = acc_dim0[iter->first]; + if (dim1) + *dim1 = acc_dim1[iter->first]; + if (dim2) + *dim2 = acc_dim2[iter->first]; + } + else if (iter->second == max) + { + divider += iter->second; + *dim0 += acc_dim0[iter->first]; + if (dim1) + *dim1 += acc_dim1[iter->first]; + if (dim2) + *dim2 += acc_dim2[iter->first]; } - return abs(dist); -} - -double angle(CvPoint *A,CvPoint *B, CvPoint *C,CvPoint *D, int isDirectionDependent){ - double angle; - double a = B->x - A->x; - double b = B->y - A->y; - double c = D->x - C->x; - double d = D->y - C->y; - angle = acos( ((a * c) + (b * d) )/(sqrt(a*a + b*b) * sqrt(c*c + d*d))); - if(isDirectionDependent){ - return angle; - }else{ - if (angle > CV_PI/2){ - return CV_PI - angle; - }else - return angle; - } -} - -double polyLinePointDist(CvPoint *PointList, int nPnts,CvPoint *C, int *index, int isClosedPolygon){ -// Calculate minimum distance of Point C to Polygon whose points are in list PointList -// if isClosedPolygon is true polygon is closed (segnment of the first and last point is also checked) -// index is the index of point A in pointlist, -// where A is the starting point of the closest polygon segment - *index = -1; - double mindist= -1; - double dist; - for( int i=0; i < nPnts-1; i++){ - 
dist=linePointDist(&PointList[i],&PointList[i+1],C,1); - if (mindist == -1 || dist &points, cv::RotatedRect& ellipse_box) -{ - if(points.size() < 8) return; - - CvMat* vector = cvCreateMat(1, int(points.size()), CV_64FC2); - for(size_t i = 0; i < points.size(); ++i) - { - CV_MAT_ELEM(*vector, cv::Point2f, 0, i) = (cv::Point2f)points[i]; - } - ellipse_box = cv::fitEllipse(cv::cvarrToMat(&vector)); - //cvReleaseMat(&vector); -} - -int exp_filt2(vector &v, vector &ret, bool clamp) -{ - double y; - int n = (int)v.size(); - - double a = pow(0.01, 8.0/n);//0.8; - double k = -log(a); - - // Forward - vector yp(n); - - y = 0; - for(int i = 0; i < n; ++i) - y = a * y + v[i]; - - y *= 1.0 / (1.0-pow(a,n)); - - for(int i = 0; i < n; ++i) - { - y = a * y + v[i]; - yp[i] = y; - } - - // Backward - vector ym(n); - - y = 0; - for(int i = n-1; i >= 0; --i) - y = a * y + v[i]; - - y *= 1.0 / (1.0-pow(a,n)); - - for(int i = n-1; i >= 0; --i) - { - y = a * y + v[i]; - ym[i] = y; - } - - // Filter - ret.resize(n); - for(int i = 0; i < n; ++i) - { - ret[i] = (k/2.0) * (yp[i] + ym[i] - v[i]); - } - - return int(ret.size()); -} - -int find_zero_crossings(const vector& v, vector &corners, int offs) -{ - int ind = 0; - int len = (int)v.size(); - - int state; - if(Sign(v.at(0)) == 1) - state = 1; - else - state = 2; - - corners.clear(); - for(int i = 0; i < len+offs; ++i) - { - if(icols == 1) { - std::cout<rows; j++) { - std::cout<<" "<rows == 1) { - std::cout<cols; i++) { - std::cout<<" "<rows; j++) { - for (int i=0; icols; i++) { - std::cout<<" "<= 0) return int(val+(binsize/2))/binsize; - return int(val-(binsize/2))/binsize; -} -double Histogram::DimVal(int dim, int index) { - int binsize = (dim < int(dim_binsize.size()) ? dim_binsize[dim] : 1); - return (index * binsize); -} -void Histogram::AddDimension(int binsize) { - dim_binsize.push_back(binsize); -} -void Histogram::Clear() { - bins.clear(); -} -void Histogram::Inc(double dim0, double dim1, double dim2) { - Index index(DimIndex(0,dim0), DimIndex(1,dim1), DimIndex(2,dim2)); - if (bins.find(index) != bins.end()) { - bins[index]++; - } else { - bins[index] = 1; - } -} -int Histogram::GetMax(double *dim0, double *dim1, double *dim2) { - map::const_iterator iter, max_iter; - int max=0; - for (max_iter = iter = bins.begin(); iter != bins.end(); iter++) { - if (iter->second > max) { - max = iter->second; - max_iter = iter; - } - } - if (max > 0) { - *dim0 = DimVal(0, max_iter->first.val[0]); - if (dim1) *dim1 = DimVal(1, max_iter->first.val[1]); - if (dim2) *dim2 = DimVal(2, max_iter->first.val[2]); - } - return max; -} -void HistogramSubpixel::Clear() { - bins.clear(); - acc_dim0.clear(); - acc_dim1.clear(); - acc_dim2.clear(); -} -void HistogramSubpixel::Inc(double dim0, double dim1, double dim2) { - Index index(DimIndex(0,dim0), DimIndex(1,dim1), DimIndex(2,dim2)); - if (bins.find(index) != bins.end()) { - bins[index]++; - acc_dim0[index] += dim0; - acc_dim1[index] += dim1; - acc_dim2[index] += dim2; - } else { - bins[index] = 1; - acc_dim0[index] = dim0; - acc_dim1[index] = dim1; - acc_dim2[index] = dim2; - } -} -int HistogramSubpixel::GetMax(double *dim0, double *dim1, double *dim2) { - map::const_iterator iter; - int max=0; - int divider=0; - for (iter = bins.begin(); iter != bins.end(); iter++) { - if (iter->second > max) { - divider = max = iter->second; - *dim0 = acc_dim0[iter->first]; - if (dim1) *dim1 = acc_dim1[iter->first]; - if (dim2) *dim2 = acc_dim2[iter->first]; - } else if (iter->second == max) { - divider += iter->second; - *dim0 += 
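// Reviewer sketch (not part of the patch; segmentPointDist is an illustrative
// name): the removed CvPoint helpers above compute point-to-segment distance
// the classic way: |cross(AB, AC)| / |AB| is the perpendicular distance to
// the infinite line, and the two dot products detect when the foot of the
// perpendicular falls outside the segment, in which case the distance to the
// nearer endpoint is returned. The same logic with cv::Point2d instead of the
// legacy CvPoint:
#include <cmath>
#include <opencv2/core/core.hpp>

double segmentPointDist(const cv::Point2d& a, const cv::Point2d& b,
                        const cv::Point2d& c)
{
  const cv::Point2d ab = b - a, ac = c - a, bc = c - b;
  if (ab.dot(bc) > 0)  // foot of the perpendicular lies beyond B
    return std::sqrt(bc.dot(bc));
  if ((-ab).dot(ac) > 0)  // foot lies before A
    return std::sqrt(ac.dot(ac));
  return std::abs(ab.cross(ac)) / std::sqrt(ab.dot(ab));
}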
acc_dim0[iter->first]; - if (dim1) *dim1 += acc_dim1[iter->first]; - if (dim2) *dim2 += acc_dim2[iter->first]; - } - } - if (max > 0) { - *dim0 /= divider; - if (dim1) *dim1 /= divider; - if (dim2) *dim2 /= divider; - } - return max; -} - -struct SerializationFormatterXml { - tinyxml2::XMLDocument document; - tinyxml2::XMLElement *xml_current; - SerializationFormatterXml() : xml_current(0) {} + } + if (max > 0) + { + *dim0 /= divider; + if (dim1) + *dim1 /= divider; + if (dim2) + *dim2 /= divider; + } + return max; +} + +struct SerializationFormatterXml +{ + TiXmlDocument document; + TiXmlElement* xml_current; + SerializationFormatterXml() : xml_current(0) + { + } }; -bool Serialization::Output() { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - if (filename.size() > 0) { - tinyxml2::XMLDeclaration* declaration=xml->document.NewDeclaration(); - xml->document.InsertFirstChild(declaration); - xml->document.SaveFile(filename.c_str()); - } else { - std::cout << "Invalid Filename" << std::endl; - exit(0); - // const tinyxml2::XMLNode *node = (xml->xml_current ? xml->xml_current : xml->document.RootElement()); - // std::basic_ostream *os = dynamic_cast *>(stream); - // (*os)<<(*node); - //(*stream)<<(*node); - } - return true; -} - -bool Serialization::Input() { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - if (filename.size() > 0) { - xml->document.LoadFile(filename.c_str()); - } else { - std::cout << "Invalid Filename" << std::endl; - exit(0); - // // TODO: How this should be handled with nested classes? - // tinyxml2::XMLNode *node = (xml->xml_current ? xml->xml_current : xml->document.RootElement()); - // if (node == 0) { - // node = (tinyxml2::XMLElement*)xml->document.LinkEndChild(new tinyxml2::XMLElement("root")); - // } - // std::basic_istream *is = dynamic_cast *>(stream); - // (*is)>>(*node); - //(*stream)>>(*node); - } - return true; -} - -bool Serialization::Descend(const char *id) { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - if (input) { - if (xml->xml_current == 0) { - xml->xml_current = xml->document.RootElement(); - if (!xml->xml_current || (strcmp(xml->xml_current->Value(), id) != 0)) { - return false; - } - } else { - xml->xml_current = (tinyxml2::XMLElement*)xml->xml_current->FirstChildElement(id); - if (xml->xml_current == NULL) return false; - } - } else { - tinyxml2::XMLElement* temp; - temp->SetName(id); - if (xml->xml_current == 0) { - xml->xml_current = (tinyxml2::XMLElement*)xml->document.LinkEndChild(temp); - } else { - xml->xml_current = (tinyxml2::XMLElement*)xml->xml_current->LinkEndChild(temp); - } - } - return true; -} -bool Serialization::Ascend() { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - xml->xml_current = (tinyxml2::XMLElement*)xml->xml_current->Parent(); - return true; -} - -Serialization::Serialization(std::string _filename) { - SerializationFormatterXml *xml = new SerializationFormatterXml(); - formatter_handle = xml; - filename = _filename; - input = false; // by default output -} - -Serialization::Serialization(std::basic_iostream &_stream) -{ - SerializationFormatterXml *xml = new SerializationFormatterXml(); - formatter_handle = xml; - stream = &_stream; -} - -Serialization::Serialization(std::basic_istream &_stream) { - SerializationFormatterXml *xml = new SerializationFormatterXml(); - formatter_handle = xml; - stream = &_stream; -} - -Serialization::Serialization(std::basic_ostream &_stream) { - 
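// Reviewer note (not part of the patch): the removed tinyxml2 output branch
// of Descend() above calls SetName() through an uninitialized XMLElement*
// ("tinyxml2::XMLElement* temp; temp->SetName(id);"); in tinyxml2 every node
// must be allocated by its owning XMLDocument. Had that path been kept, a
// correct sketch would be:
//
//   tinyxml2::XMLElement* temp = xml->document.NewElement(id);
//   if (xml->xml_current == 0)
//     xml->xml_current = xml->document.InsertEndChild(temp)->ToElement();
//   else
//     xml->xml_current = xml->xml_current->InsertEndChild(temp)->ToElement();
//
// The TinyXML-1 code added below sidesteps this by allocating with
// "new TiXmlElement(id)" and passing it to LinkEndChild(), which takes
// ownership.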
SerializationFormatterXml *xml = new SerializationFormatterXml(); - formatter_handle = xml; - stream = &_stream; -} - -Serialization::~Serialization() { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - delete xml; -} - -bool Serialization::Serialize(int &data, const std::string &name) { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - if (!xml || !xml->xml_current) return false; - bool ret = true; - if (input) ret = (xml->xml_current->QueryIntAttribute(name.c_str(), &data) == EXIT_SUCCESS); - else xml->xml_current->SetAttribute(name.c_str(), data); - return ret; -} - -bool Serialization::Serialize(unsigned short &data, const std::string &name) { - int i = data; - bool ret = Serialize(i, name); - data = i; - return ret; -} - -bool Serialization::Serialize(unsigned long &data, const std::string &name) { - // TODO: Possible overflow here... - int i = data; - bool ret = Serialize(i, name); - data = i; - return ret; -} - -bool Serialization::Serialize(double &data, const std::string &name) { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - bool ret = true; - if (input) ret = (xml->xml_current->QueryDoubleAttribute(name.c_str(), &data) == EXIT_SUCCESS); - else xml->xml_current->DoubleAttribute(name.c_str(), data); - return ret; -} - -bool Serialization::Serialize(std::string &data, const std::string &name) { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - bool ret = true; - if (input) { - const char *tmp = xml->xml_current->Attribute(name.c_str()); - if (tmp == NULL) ret=false; - else data = tmp; - } - else xml->xml_current->SetAttribute(name.c_str(), data.c_str()); - return ret; -} - -bool Serialization::Serialize(CvMat &data, const std::string &name) { - SerializationFormatterXml *xml = (SerializationFormatterXml *)formatter_handle; - bool ret = true; - if (input) { - tinyxml2::XMLElement *xml_matrix = (tinyxml2::XMLElement*)xml->xml_current->FirstChildElement(name.c_str()); - if (xml_matrix == NULL) return false; - if (!FileFormatUtils::parseXMLMatrix(xml_matrix, &data)) return false; - } - else { - xml->xml_current->LinkEndChild(FileFormatUtils::createXMLMatrix(name.c_str(), &data)); - } - return ret; +bool Serialization::Output() +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + if (filename.size() > 0) + { + // xml->document.LinkEndChild(new TiXmlDeclaration("1.0", "UTF-8", "no")); + xml->document.InsertBeforeChild(xml->document.RootElement(), + TiXmlDeclaration("1.0", "UTF-8", "no")); + xml->document.SaveFile(filename.c_str()); + } + else + { + const TiXmlNode* node = + (xml->xml_current ? xml->xml_current : xml->document.RootElement()); + std::basic_ostream* os = + dynamic_cast*>(stream); + (*os) << (*node); + //(*stream)<<(*node); + } + return true; +} + +bool Serialization::Input() +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + if (filename.size() > 0) + { + xml->document.LoadFile(filename.c_str()); + } + else + { + // TODO: How this should be handled with nested classes? + TiXmlNode* node = + (xml->xml_current ? 
xml->xml_current : xml->document.RootElement()); + if (node == 0) + { + node = (TiXmlElement*)xml->document.LinkEndChild(new TiXmlElement("roo" + "t")); + } + std::basic_istream* is = + dynamic_cast*>(stream); + (*is) >> (*node); + //(*stream)>>(*node); + } + return true; +} + +bool Serialization::Descend(const char* id) +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + if (input) + { + if (xml->xml_current == 0) + { + xml->xml_current = xml->document.RootElement(); + if (!xml->xml_current || (strcmp(xml->xml_current->Value(), id) != 0)) + { + return false; + } + } + else + { + xml->xml_current = (TiXmlElement*)xml->xml_current->FirstChild(id); + if (xml->xml_current == NULL) + return false; + } + } + else + { + if (xml->xml_current == 0) + { + xml->xml_current = + (TiXmlElement*)xml->document.LinkEndChild(new TiXmlElement(id)); + } + else + { + xml->xml_current = + (TiXmlElement*)xml->xml_current->LinkEndChild(new TiXmlElement(id)); + } + } + return true; +} +bool Serialization::Ascend() +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + xml->xml_current = (TiXmlElement*)xml->xml_current->Parent(); + return true; +} + +Serialization::Serialization(std::string _filename) +{ + SerializationFormatterXml* xml = new SerializationFormatterXml(); + formatter_handle = xml; + filename = _filename; + input = false; // by default output +} + +Serialization::Serialization(std::basic_iostream& _stream) +{ + SerializationFormatterXml* xml = new SerializationFormatterXml(); + formatter_handle = xml; + stream = &_stream; +} + +Serialization::Serialization(std::basic_istream& _stream) +{ + SerializationFormatterXml* xml = new SerializationFormatterXml(); + formatter_handle = xml; + stream = &_stream; +} + +Serialization::Serialization(std::basic_ostream& _stream) +{ + SerializationFormatterXml* xml = new SerializationFormatterXml(); + formatter_handle = xml; + stream = &_stream; +} + +Serialization::~Serialization() +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + delete xml; +} + +bool Serialization::Serialize(int& data, const std::string& name) +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + if (!xml || !xml->xml_current) + return false; + bool ret = true; + if (input) + ret = (xml->xml_current->QueryIntAttribute(name, &data) == TIXML_SUCCESS); + else + xml->xml_current->SetAttribute(name, data); + return ret; +} + +bool Serialization::Serialize(unsigned short& data, const std::string& name) +{ + int i = data; + bool ret = Serialize(i, name); + data = i; + return ret; +} + +bool Serialization::Serialize(unsigned long& data, const std::string& name) +{ + // TODO: Possible overflow here... 
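// Reviewer sketch (not part of the patch): the truncation this TODO warns
// about can be guarded explicitly before the int round-trip, e.g.:
//
//   #include <limits>
//   if (data > static_cast<unsigned long>(std::numeric_limits<int>::max()))
//     return false;  // value would not survive the int round-trip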
+ int i = data; + bool ret = Serialize(i, name); + data = i; + return ret; } -} // namespace alvar +bool Serialization::Serialize(double& data, const std::string& name) +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + bool ret = true; + if (input) + ret = + (xml->xml_current->QueryDoubleAttribute(name, &data) == TIXML_SUCCESS); + else + xml->xml_current->SetDoubleAttribute(name.c_str(), data); + return ret; +} + +bool Serialization::Serialize(std::string& data, const std::string& name) +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + bool ret = true; + if (input) + { + const char* tmp = xml->xml_current->Attribute(name.c_str()); + if (tmp == NULL) + ret = false; + else + data = tmp; + } + else + xml->xml_current->SetAttribute(name.c_str(), data.c_str()); + return ret; +} + +bool Serialization::Serialize(cv::Mat& data, const std::string& name) +{ + SerializationFormatterXml* xml = (SerializationFormatterXml*)formatter_handle; + bool ret = true; + if (input) + { + TiXmlElement* xml_matrix = + (TiXmlElement*)xml->xml_current->FirstChild(name); + if (xml_matrix == NULL) + return false; + if (!FileFormatUtils::parseXMLMatrix(xml_matrix, data)) + return false; + } + else + { + xml->xml_current->LinkEndChild( + FileFormatUtils::createXMLMatrix(name.c_str(), data)); + } + return ret; +} + +} // namespace alvar diff --git a/ar_track_alvar/src/kinect_filtering.cpp b/ar_track_alvar/src/kinect_filtering.cpp index c7e3f20..1bc1fd3 100644 --- a/ar_track_alvar/src/kinect_filtering.cpp +++ b/ar_track_alvar/src/kinect_filtering.cpp @@ -29,9 +29,7 @@ * */ - - -//#include "rclcpp/rclcpp.hpp" +//#include #include #include // #include @@ -41,228 +39,229 @@ namespace ar_track_alvar { +namespace gm = geometry_msgs; - namespace gm=geometry_msgs; +using std::cerr; +using std::endl; +using std::ostream; +using std::vector; - using std::vector; - using std::cerr; - using std::endl; - using std::ostream; +// Distance threshold for plane fitting: how far are points +// allowed to be off the plane? +const double distance_threshold_ = 0.005; - // Distance threshold for plane fitting: how far are points - // allowed to be off the plane? 
- const double distance_threshold_ = 0.005; +PlaneFitResult fitPlane(ARCloud::ConstPtr cloud) +{ + PlaneFitResult res; + pcl::PointIndices::Ptr inliers = boost::make_shared(); - PlaneFitResult fitPlane (ARCloud::ConstPtr cloud) - { - PlaneFitResult res; - pcl::PointIndices::Ptr inliers=boost::make_shared(); - - pcl::SACSegmentation seg; - seg.setOptimizeCoefficients(true); - seg.setModelType(pcl::SACMODEL_PLANE); - seg.setMethodType(pcl::SAC_RANSAC); - seg.setDistanceThreshold(distance_threshold_); - - seg.setInputCloud(cloud); - seg.segment(*inliers, res.coeffs); - - pcl::ExtractIndices extracter; - extracter.setInputCloud(cloud); - extracter.setIndices(inliers); - extracter.setNegative(false); - extracter.filter(*res.inliers); - - return res; - } + pcl::SACSegmentation seg; + seg.setOptimizeCoefficients(true); + seg.setModelType(pcl::SACMODEL_PLANE); + seg.setMethodType(pcl::SAC_RANSAC); + seg.setDistanceThreshold(distance_threshold_); - ARCloud::Ptr filterCloud (const ARCloud& cloud, const vector >& pixels) - { - ARCloud::Ptr out(new ARCloud()); - //RCLCPP_INFO(rclcpp::get_logger("ArTrackAlvar"), " Filtering %zu pixels", pixels.size()); - //for (const cv::Point& p : pixels) - for(size_t i=0; ipoints.push_back(pt); - } - return out; - } + seg.setInputCloud(cloud); + seg.segment(*inliers, res.coeffs); - gm::msg::Point centroid (const ARCloud& points) - { - gm::msg::Point sum; - sum.x = 0; - sum.y = 0; - sum.z = 0; - //for (const Point& p : points) - for(size_t i=0; i extracter; + extracter.setInputCloud(cloud); + extracter.setIndices(inliers); + extracter.setNegative(false); + extracter.filter(*res.inliers); - // Helper function to construct a geometry_msgs::msg::Quaternion - inline - gm::msg::Quaternion makeQuaternion (double x, double y, double z, double w) - { - gm::msg::Quaternion q; - q.x = x; - q.y = y; - q.z = z; - q.w = w; - return q; - } + return res; +} - // Extract and normalize plane coefficients - int getCoeffs (const pcl::ModelCoefficients& coeffs, double* a, double* b, - double* c, double* d) +ARCloud::Ptr filterCloud( + const ARCloud& cloud, + const vector >& pixels) +{ + ARCloud::Ptr out(new ARCloud()); + // ROS_INFO(" Filtering %zu pixels", pixels.size()); + // for (const cv::Point& p : pixels) + for (size_t i = 0; i < pixels.size(); i++) { - if(coeffs.values.size() != 4) - return -1; - const double s = coeffs.values[0]*coeffs.values[0] + - coeffs.values[1]*coeffs.values[1] + coeffs.values[2]*coeffs.values[2]; - if(fabs(s) < 1e-6) - return -1; - *a = coeffs.values[0]/s; - *b = coeffs.values[1]/s; - *c = coeffs.values[2]/s; - *d = coeffs.values[3]/s; - return 0; + const cv::Point& p = pixels[i]; + const ARPoint& pt = cloud(p.x, p.y); + if (std::isnan(pt.x) || std::isnan(pt.y) || std::isnan(pt.z)) + { + // ROS_INFO(" Skipping (%.4f, %.4f, %.4f)", pt.x, pt.y, pt.z); + } + else + out->points.push_back(pt); } + return out; +} - // Project point onto plane - tf2::Vector3 project (const ARPoint& p, const double a, const double b, - const double c, const double d) +gm::msg::Point centroid(const ARCloud& points) +{ + gm::msg::Point sum; + sum.x = 0; + sum.y = 0; + sum.z = 0; + // for (const Point& p : points) + for (size_t i = 0; i < points.size(); i++) { - const double t = a*p.x + b*p.y + c*p.z + d; - return tf2::Vector3(p.x-t*a, p.y-t*b, p.z-t*c); + sum.x += points[i].x; + sum.y += points[i].y; + sum.z += points[i].z; } - ostream& operator<< (ostream& str, const tf2::Matrix3x3& m) - { - str << "[" << m[0][0] << ", " << m[0][1] << ", " << m[0][2] << "; " - << m[1][0] << ", " << 
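// Reviewer note and sketch (not part of the patch; checkProjection is an
// illustrative name): getCoeffs() above divides the plane coefficients by the
// squared norm s = a^2 + b^2 + c^2 rather than by sqrt(s), and project()
// computes q = p - t*n with t = a*x + b*y + c*z + d; both steps are only
// consistent when the normal is already unit length (s == 1), which the code
// effectively assumes of the RANSAC output. A quick check of the projection
// identity under that assumption:
#include <cassert>
#include <cmath>
#include <tf2/LinearMath/Vector3.h>

void checkProjection(const tf2::Vector3& p, double a, double b, double c,
                     double d)
{
  const double t = a * p.x() + b * p.y() + c * p.z() + d;
  const tf2::Vector3 q(p.x() - t * a, p.y() - t * b, p.z() - t * c);
  // the projected point must satisfy the plane equation (up to rounding)
  assert(std::fabs(a * q.x() + b * q.y() + c * q.z() + d) < 1e-9);
}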
m[1][1] << ", " << m[1][2] << "; " - << m[2][0] << ", " << m[2][1] << ", " << m[2][2] << "]"; - return str; - } + gm::msg::Point center; + const size_t n = points.size(); + center.x = sum.x / n; + center.y = sum.y / n; + center.z = sum.z / n; + return center; +} - ostream& operator<< (ostream& str, const tf2::Quaternion& q) - { - str << "[(" << q.x() << ", " << q.y() << ", " << q.z() << - "), " << q.w() << "]"; - return str; - } +// Helper function to construct a geometry_msgs::Quaternion +inline gm::msg::Quaternion makeQuaternion(double x, double y, double z, double w) +{ + gm::msg::Quaternion q; + q.x = x; + q.y = y; + q.z = z; + q.w = w; + return q; +} - ostream& operator<< (ostream& str, const tf2::Vector3& v) - { - str << "(" << v.x() << ", " << v.y() << ", " << v.z() << ")"; - return str; - } +// Extract and normalize plane coefficients +int getCoeffs(const pcl::ModelCoefficients& coeffs, double* a, double* b, + double* c, double* d) +{ + if (coeffs.values.size() != 4) + return -1; + const double s = coeffs.values[0] * coeffs.values[0] + + coeffs.values[1] * coeffs.values[1] + + coeffs.values[2] * coeffs.values[2]; + if (fabs(s) < 1e-6) + return -1; + *a = coeffs.values[0] / s; + *b = coeffs.values[1] / s; + *c = coeffs.values[2] / s; + *d = coeffs.values[3] / s; + return 0; +} + +// Project point onto plane +tf2::Vector3 project(const ARPoint& p, const double a, const double b, + const double c, const double d) +{ + const double t = a * p.x + b * p.y + c * p.z + d; + return tf2::Vector3(p.x - t * a, p.y - t * b, p.z - t * c); +} + +ostream& operator<<(ostream& str, const tf2::Matrix3x3& m) +{ + str << "[" << m[0][0] << ", " << m[0][1] << ", " << m[0][2] << "; " << m[1][0] + << ", " << m[1][1] << ", " << m[1][2] << "; " << m[2][0] << ", " + << m[2][1] << ", " << m[2][2] << "]"; + return str; +} + +ostream& operator<<(ostream& str, const tf2::Quaternion& q) +{ + str << "[(" << q.x() << ", " << q.y() << ", " << q.z() << "), " << q.w() + << "]"; + return str; +} + +ostream& operator<<(ostream& str, const tf2::Vector3& v) +{ + str << "(" << v.x() << ", " << v.y() << ", " << v.z() << ")"; + return str; +} + +int extractFrame(const pcl::ModelCoefficients& coeffs, const ARPoint& p1, + const ARPoint& p2, const ARPoint& p3, const ARPoint& p4, + tf2::Matrix3x3& retmat) +{ + // Get plane coeffs and project points onto the plane + double a = 0, b = 0, c = 0, d = 0; + if (getCoeffs(coeffs, &a, &b, &c, &d) < 0) + return -1; - int extractFrame (const pcl::ModelCoefficients& coeffs, - const ARPoint& p1, const ARPoint& p2, - const ARPoint& p3, const ARPoint& p4, - tf2::Matrix3x3 &retmat) + const tf2::Vector3 q1 = project(p1, a, b, c, d); + const tf2::Vector3 q2 = project(p2, a, b, c, d); + const tf2::Vector3 q3 = project(p3, a, b, c, d); + const tf2::Vector3 q4 = project(p4, a, b, c, d); + + // Make sure points aren't the same so things are well-defined + if ((q2 - q1).length() < 1e-3) + return -1; + + // (inverse) matrix with the given properties + const tf2::Vector3 v = (q2 - q1).normalized(); + const tf2::Vector3 n(a, b, c); + const tf2::Vector3 w = -v.cross(n); + tf2::Matrix3x3 m(v[0], v[1], v[2], w[0], w[1], w[2], n[0], n[1], n[2]); + + // Possibly flip things based on third point + const tf2::Vector3 diff = (q4 - q3).normalized(); + // ROS_INFO_STREAM("w = " << w << " and d = " << diff); + if (w.dot(diff) < 0) { - // Get plane coeffs and project points onto the plane - double a=0, b=0, c=0, d=0; - if(getCoeffs(coeffs, &a, &b, &c, &d) < 0) - return -1; - - const tf2::Vector3 q1 = project(p1, a, 
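// Reviewer sketch (not part of the patch; frameFromPlane is an illustrative
// name): extractFrame() above builds the marker frame from the fitted plane:
// v is the in-plane direction through two projected corners, n = (a, b, c) is
// the plane normal, and w = -v x n completes the basis; the three vectors are
// laid out as rows, so the matrix is inverted before being returned. The
// construction in isolation:
#include <tf2/LinearMath/Matrix3x3.h>
#include <tf2/LinearMath/Vector3.h>

tf2::Matrix3x3 frameFromPlane(const tf2::Vector3& q1, const tf2::Vector3& q2,
                              const tf2::Vector3& n)
{
  const tf2::Vector3 v = (q2 - q1).normalized();
  const tf2::Vector3 w = -v.cross(n);  // third axis of the basis
  const tf2::Matrix3x3 m(v[0], v[1], v[2],
                         w[0], w[1], w[2],
                         n[0], n[1], n[2]);
  return m.inverse();  // rows become columns, as extractFrame() does
}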
b, c, d); - const tf2::Vector3 q2 = project(p2, a, b, c, d); - const tf2::Vector3 q3 = project(p3, a, b, c, d); - const tf2::Vector3 q4 = project(p4, a, b, c, d); - - // Make sure points aren't the same so things are well-defined - if((q2-q1).length() < 1e-3) - return -1; - - // (inverse) matrix with the given properties - const tf2::Vector3 v = (q2-q1).normalized(); - const tf2::Vector3 n(a, b, c); - const tf2::Vector3 w = -v.cross(n); - tf2::Matrix3x3 m(v[0], v[1], v[2], w[0], w[1], w[2], n[0], n[1], n[2]); - - // Possibly flip things based on third point - const tf2::Vector3 diff = (q4-q3).normalized(); - //ROS_INFO_STREAM("w = " << w << " and d = " << diff); - if (w.dot(diff)<0) - { - //ROS_INFO_STREAM("Flipping normal based on p3. Current value: " << m); - m[1] = -m[1]; - m[2] = -m[2]; - //ROS_INFO_STREAM("New value: " << m); - } - - // Invert and return - retmat = m.inverse(); - //cerr << "Frame is " << retmat << endl; - return 0; + // ROS_INFO_STREAM("Flipping normal based on p3. Current value: " << m); + m[1] = -m[1]; + m[2] = -m[2]; + // ROS_INFO_STREAM("New value: " << m); } + // Invert and return + retmat = m.inverse(); + // cerr << "Frame is " << retmat << endl; + return 0; +} + +int getQuaternion(const tf2::Matrix3x3& m, tf2::Quaternion& retQ) +{ + if (m.determinant() <= 0) + return -1; + + // tf2Scalar y=0, p=0, r=0; + // m.getEulerZYX(y, p, r); + // retQ.setEulerZYX(y, p, r); - int getQuaternion (const tf2::Matrix3x3& m, tf2::Quaternion &retQ) + // Use Eigen for this part instead, because the ROS version of bullet appears + // to have a bug + Eigen::Matrix3f eig_m; + for (int i = 0; i < 3; i++) { - if(m.determinant() <= 0) - return -1; - - //tf2Scalar y=0, p=0, r=0; - //m.getEulerZYX(y, p, r); - //retQ.setEulerZYX(y, p, r); - - //Use Eigen for this part instead, because the ROS version of bullet appears to have a bug - Eigen::Matrix3f eig_m; - for(int i=0; i<3; i++){ - for(int j=0; j<3; j++){ - eig_m(i,j) = m[i][j]; - } + for (int j = 0; j < 3; j++) + { + eig_m(i, j) = m[i][j]; } - Eigen::Quaternion eig_quat(eig_m); - - // Translate back to bullet - tf2Scalar ex = eig_quat.x(); - tf2Scalar ey = eig_quat.y(); - tf2Scalar ez = eig_quat.z(); - tf2Scalar ew = eig_quat.w(); - tf2::Quaternion quat(ex,ey,ez,ew); - retQ = quat.normalized(); - - return 0; } + Eigen::Quaternion eig_quat(eig_m); + // Translate back to bullet + tf2Scalar ex = eig_quat.x(); + tf2Scalar ey = eig_quat.y(); + tf2Scalar ez = eig_quat.z(); + tf2Scalar ew = eig_quat.w(); + tf2::Quaternion quat(ex, ey, ez, ew); + retQ = quat.normalized(); - int extractOrientation (const pcl::ModelCoefficients& coeffs, - const ARPoint& p1, const ARPoint& p2, - const ARPoint& p3, const ARPoint& p4, - gm::msg::Quaternion &retQ) - { - tf2::Matrix3x3 m; - if(extractFrame(coeffs, p1, p2, p3, p4, m) < 0) - return -1; - tf2::Quaternion q; - if(getQuaternion(m,q) < 0) - return -1; - retQ.x = q.x(); - retQ.y = q.y(); - retQ.z = q.z(); - retQ.w = q.w(); - return 0; - } + return 0; +} + +int extractOrientation(const pcl::ModelCoefficients& coeffs, const ARPoint& p1, + const ARPoint& p2, const ARPoint& p3, const ARPoint& p4, + gm::msg::Quaternion& retQ) +{ + tf2::Matrix3x3 m; + if (extractFrame(coeffs, p1, p2, p3, p4, m) < 0) + return -1; + tf2::Quaternion q; + if (getQuaternion(m, q) < 0) + return -1; + retQ.x = q.x(); + retQ.y = q.y(); + retQ.z = q.z(); + retQ.w = q.w(); + return 0; +} -} // namespace +} // namespace ar_track_alvar diff --git a/ar_track_alvar/src/medianFilter.cpp b/ar_track_alvar/src/medianFilter.cpp index 
ae78636..f667f04 100644 --- a/ar_track_alvar/src/medianFilter.cpp +++ b/ar_track_alvar/src/medianFilter.cpp @@ -29,8 +29,8 @@ */ /** - * \file - * + * \file + * * N-dimensional median filter for marker poses * * \author Scott Niekum @@ -40,51 +40,68 @@ namespace ar_track_alvar { - using namespace alvar; +using namespace alvar; - MedianFilter::MedianFilter(int n){ - median_n = n; - median_ind = 0; - median_init = false; - median_poses = new Pose[median_n]; - } +MedianFilter::MedianFilter(int n) +{ + median_n = n; + median_ind = 0; + median_init = false; + median_poses = new Pose[median_n]; +} - void MedianFilter::addPose(const Pose &new_pose){ - median_poses[median_ind] = new_pose; - median_ind = (median_ind+1) % median_n; - } +void MedianFilter::addPose(const Pose& new_pose) +{ + median_poses[median_ind] = new_pose; + median_ind = (median_ind + 1) % median_n; +} - void MedianFilter::getMedian(Pose &ret_pose){ - if(!median_init){ - if(median_ind == median_n-1) - median_init = true; - ret_pose = median_poses[median_ind-1]; - } +void MedianFilter::getMedian(Pose& ret_pose) +{ + if (!median_init) + { + if (median_ind == median_n - 1) + median_init = true; + ret_pose = median_poses[median_ind - 1]; + } - else{ - double min_dist = 0; - int min_ind = 0; - for(int i=0; i -namespace alvar { -namespace plugins { - +namespace alvar +{ +namespace plugins +{ CaptureCmu::CaptureCmu(const CaptureDevice captureDevice) - : Capture(captureDevice) - , mCamera(new C1394Camera()) - , mChannels(-1) - , mReturnFrame(NULL) + : Capture(captureDevice) + , mCamera(new C1394Camera()) + , mChannels(-1) + , mReturnFrame(NULL) { } CaptureCmu::~CaptureCmu() { - stop(); - delete mCamera; + stop(); + delete mCamera; } bool CaptureCmu::start() { - if (isCapturing()) { - return isCapturing(); - } - - if (mCamera->CheckLink() != CAM_SUCCESS) { - return false; - } - - int numberCameras = mCamera->GetNumberCameras(); - bool cameraSelected = false; - for (int i = 0; i < numberCameras; i++) { - mCamera->SelectCamera(i); - LARGE_INTEGER uniqueId; - mCamera->GetCameraUniqueID(&uniqueId); - std::stringstream convert; - convert << std::hex << uniqueId.HighPart << uniqueId.LowPart; - if (captureDevice().id().compare(convert.str()) == 0) { - cameraSelected = true; - break; - } - } - - if (!cameraSelected) { - return false; - } - - if (mCamera->InitCamera(true) != CAM_SUCCESS) { - return false; - } - - // TODO: this needs to be parameterized somehow - if (mCamera->HasVideoMode(2, 4)) { // 1600x1200rgb - if (mCamera->SetVideoFormat(2) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(4) != CAM_SUCCESS) return false; - mChannels = 3; - } - else if (mCamera->HasVideoMode(2, 1)) { // 1280x960rgb - if (mCamera->SetVideoFormat(2) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(1) != CAM_SUCCESS) return false; - mChannels = 3; - } - else if (mCamera->HasVideoMode(1, 4)) { // 1024x768rgb - if (mCamera->SetVideoFormat(1) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(4) != CAM_SUCCESS) return false; - mChannels = 3; - } - else if (mCamera->HasVideoMode(1, 1)) { // 800x600rgb - if (mCamera->SetVideoFormat(1) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(1) != CAM_SUCCESS) return false; - mChannels = 3; + if (isCapturing()) + { + return isCapturing(); + } + + if (mCamera->CheckLink() != CAM_SUCCESS) + { + return false; + } + + int numberCameras = mCamera->GetNumberCameras(); + bool cameraSelected = false; + for (int i = 0; i < numberCameras; i++) + { + mCamera->SelectCamera(i); + LARGE_INTEGER uniqueId; + 
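// Reviewer note and sketch (not part of the patch; Vec3 and medoidIndex are
// illustrative names): despite its name, MedianFilter::getMedian() in the
// medianFilter.cpp hunk above appears to return a medoid, the stored pose
// whose translation has the smallest summed distance to all the others, which
// stays well defined for poses where a per-axis median would not. Note also
// that the warm-up branch reads median_poses[median_ind - 1], which indexes
// out of bounds if the filter is queried before any pose has been added
// (median_ind == 0). The medoid selection over plain translations:
#include <cmath>
#include <vector>

struct Vec3
{
  double x, y, z;
};

int medoidIndex(const std::vector<Vec3>& pts)
{
  int best = 0;
  double best_sum = -1.0;
  for (size_t i = 0; i < pts.size(); ++i)
  {
    double sum = 0.0;
    for (size_t j = 0; j < pts.size(); ++j)
      sum += std::sqrt(std::pow(pts[i].x - pts[j].x, 2) +
                       std::pow(pts[i].y - pts[j].y, 2) +
                       std::pow(pts[i].z - pts[j].z, 2));
    if (best_sum < 0.0 || sum < best_sum)
    {
      best_sum = sum;
      best = static_cast<int>(i);
    }
  }
  return best;
}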
mCamera->GetCameraUniqueID(&uniqueId); + std::stringstream convert; + convert << std::hex << uniqueId.HighPart << uniqueId.LowPart; + if (captureDevice().id().compare(convert.str()) == 0) + { + cameraSelected = true; + break; } - else if (mCamera->HasVideoMode(0, 4)) { // 640x480rgb - if (mCamera->SetVideoFormat(0) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(4) != CAM_SUCCESS) return false; - mChannels = 3; + } + + if (!cameraSelected) + { + return false; + } + + if (mCamera->InitCamera(true) != CAM_SUCCESS) + { + return false; + } + + // TODO: this needs to be parameterized somehow + if (mCamera->HasVideoMode(2, 4)) + { // 1600x1200rgb + if (mCamera->SetVideoFormat(2) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(4) != CAM_SUCCESS) + return false; + mChannels = 3; + } + else if (mCamera->HasVideoMode(2, 1)) + { // 1280x960rgb + if (mCamera->SetVideoFormat(2) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(1) != CAM_SUCCESS) + return false; + mChannels = 3; + } + else if (mCamera->HasVideoMode(1, 4)) + { // 1024x768rgb + if (mCamera->SetVideoFormat(1) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(4) != CAM_SUCCESS) + return false; + mChannels = 3; + } + else if (mCamera->HasVideoMode(1, 1)) + { // 800x600rgb + if (mCamera->SetVideoFormat(1) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(1) != CAM_SUCCESS) + return false; + mChannels = 3; + } + else if (mCamera->HasVideoMode(0, 4)) + { // 640x480rgb + if (mCamera->SetVideoFormat(0) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(4) != CAM_SUCCESS) + return false; + mChannels = 3; /* // TODO: also support YUV422 } else if (mCamera->HasVideoMode(2, 3)) { // 1600x1200yuv422 @@ -131,125 +153,159 @@ bool CaptureCmu::start() if (mCamera->SetVideoMode(1) != CAM_SUCCESS) return false; mChannels = 2; */ - } - else if (mCamera->HasVideoMode(2, 5)) { // 1600x1200mono - if (mCamera->SetVideoFormat(2) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(5) != CAM_SUCCESS) return false; - mChannels = 1; - } - else if (mCamera->HasVideoMode(2, 2)) { // 1280x960mono - if (mCamera->SetVideoFormat(2) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(2) != CAM_SUCCESS) return false; - mChannels = 1; - } - else if (mCamera->HasVideoMode(1, 5)) { // 1024x768mono - if (mCamera->SetVideoFormat(1) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(5) != CAM_SUCCESS) return false; - mChannels = 1; - } - else if (mCamera->HasVideoMode(1, 2)) { // 800x600mono - if (mCamera->SetVideoFormat(1) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(2) != CAM_SUCCESS) return false; - mChannels = 1; - } - else if (mCamera->HasVideoMode(0, 5)) { // 640x480mono - if (mCamera->SetVideoFormat(0) != CAM_SUCCESS) return false; - if (mCamera->SetVideoMode(5) != CAM_SUCCESS) return false; - mChannels = 1; - } - else { - return false; - } - - mCamera->GetVideoFrameDimensions(&mXResolution, &mYResolution); - mReturnFrame = cvCreateImage(cvSize(mXResolution, mYResolution), IPL_DEPTH_8U, mChannels); - mCamera->StartImageAcquisitionEx(6, 1, ACQ_START_VIDEO_STREAM); - mIsCapturing = true; - - return isCapturing(); + } + else if (mCamera->HasVideoMode(2, 5)) + { // 1600x1200mono + if (mCamera->SetVideoFormat(2) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(5) != CAM_SUCCESS) + return false; + mChannels = 1; + } + else if (mCamera->HasVideoMode(2, 2)) + { // 1280x960mono + if (mCamera->SetVideoFormat(2) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(2) != 
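// Reviewer sketch (not part of the patch; VideoModeEntry and kPreferredModes
// are illustrative names): the format/mode cascade above and the mono
// fallbacks below encode the 1394 camera (format, mode) -> resolution table
// as nested if/else. The "TODO: this needs to be parameterized somehow" hints
// at a table-driven alternative; iterating this table and calling
// HasVideoMode()/SetVideoFormat()/SetVideoMode() on the first hit would
// reproduce the same preference order:
struct VideoModeEntry
{
  int format;
  int mode;
  int channels;  // 3 = RGB, 1 = mono
};

// Highest-resolution RGB first, mono as fallback, mirroring the code here.
static const VideoModeEntry kPreferredModes[] = {
  { 2, 4, 3 },  // 1600x1200 RGB
  { 2, 1, 3 },  // 1280x960 RGB
  { 1, 4, 3 },  // 1024x768 RGB
  { 1, 1, 3 },  // 800x600 RGB
  { 0, 4, 3 },  // 640x480 RGB
  { 2, 5, 1 },  // 1600x1200 mono
  { 2, 2, 1 },  // 1280x960 mono
  { 1, 5, 1 },  // 1024x768 mono
  { 1, 2, 1 },  // 800x600 mono
  { 0, 5, 1 },  // 640x480 mono
};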
CAM_SUCCESS) + return false; + mChannels = 1; + } + else if (mCamera->HasVideoMode(1, 5)) + { // 1024x768mono + if (mCamera->SetVideoFormat(1) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(5) != CAM_SUCCESS) + return false; + mChannels = 1; + } + else if (mCamera->HasVideoMode(1, 2)) + { // 800x600mono + if (mCamera->SetVideoFormat(1) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(2) != CAM_SUCCESS) + return false; + mChannels = 1; + } + else if (mCamera->HasVideoMode(0, 5)) + { // 640x480mono + if (mCamera->SetVideoFormat(0) != CAM_SUCCESS) + return false; + if (mCamera->SetVideoMode(5) != CAM_SUCCESS) + return false; + mChannels = 1; + } + else + { + return false; + } + + mCamera->GetVideoFrameDimensions(&mXResolution, &mYResolution); + mReturnFrame = cvCreateImage(cvSize(mXResolution, mYResolution), IPL_DEPTH_8U, + mChannels); + mCamera->StartImageAcquisitionEx(6, 1, ACQ_START_VIDEO_STREAM); + mIsCapturing = true; + + return isCapturing(); } void CaptureCmu::stop() { - if (isCapturing()) { - mCamera->StopImageAcquisition(); - cvReleaseImage(&mReturnFrame); - mIsCapturing = false; - } + if (isCapturing()) + { + mCamera->StopImageAcquisition(); + cvReleaseImage(&mReturnFrame); + mIsCapturing = false; + } } -IplImage *CaptureCmu::captureImage() +IplImage* CaptureCmu::captureImage() { - if (!isCapturing()) { - return NULL; - } - - if (mCamera->AcquireImageEx(false, NULL) == CAM_SUCCESS) { - unsigned long length = mReturnFrame->widthStep * mYResolution; - memcpy(mReturnFrame->imageData, mCamera->GetRawData(&length), length); - } - return mReturnFrame; + if (!isCapturing()) + { + return NULL; + } + + if (mCamera->AcquireImageEx(false, NULL) == CAM_SUCCESS) + { + unsigned long length = mReturnFrame->widthStep * mYResolution; + memcpy(mReturnFrame->imageData, mCamera->GetRawData(&length), length); + } + return mReturnFrame; } bool CaptureCmu::showSettingsDialog() { - CameraControlDialog(NULL, mCamera, true); - return true; + CameraControlDialog(NULL, mCamera, true); + return true; } std::string CaptureCmu::SerializeId() { - return "CaptureCmu"; + return "CaptureCmu"; } -bool CaptureCmu::Serialize(Serialization *serialization) +bool CaptureCmu::Serialize(Serialization* serialization) { - if (!mCamera) { - return false; - } - - unsigned short value; - if (serialization->IsInput()) { - if (!serialization->Serialize(value, "Gain")) return false; - mCamera->GetCameraControl(FEATURE_GAIN)->SetAutoMode(false); - mCamera->GetCameraControl(FEATURE_GAIN)->SetValue(value); - - if (!serialization->Serialize(value, "AutoExposure")) return false; - mCamera->GetCameraControl(FEATURE_AUTO_EXPOSURE)->SetAutoMode(false); - mCamera->GetCameraControl(FEATURE_AUTO_EXPOSURE)->SetValue(value); - - if (!serialization->Serialize(value, "Shutter")) return false; - mCamera->GetCameraControl(FEATURE_SHUTTER)->SetAutoMode(false); - mCamera->GetCameraControl(FEATURE_SHUTTER)->SetValue(value); - - if (!serialization->Serialize(value, "Brightness")) return false; - mCamera->GetCameraControl(FEATURE_BRIGHTNESS)->SetAutoMode(false); - mCamera->GetCameraControl(FEATURE_BRIGHTNESS)->SetValue(value); - - if (!serialization->Serialize(value, "Gamma")) return false; - mCamera->GetCameraControl(FEATURE_GAMMA)->SetAutoMode(false); - mCamera->GetCameraControl(FEATURE_GAMMA)->SetValue(value); - } else { - mCamera->GetCameraControl(FEATURE_GAIN)->GetValue(&value); - if (!serialization->Serialize(value, "Gain")) return false; - - mCamera->GetCameraControl(FEATURE_AUTO_EXPOSURE)->GetValue(&value); - if 
(!serialization->Serialize(value, "AutoExposure")) return false; - - mCamera->GetCameraControl(FEATURE_SHUTTER)->GetValue(&value); - if (!serialization->Serialize(value, "Shutter")) return false; - - mCamera->GetCameraControl(FEATURE_BRIGHTNESS)->GetValue(&value); - if (!serialization->Serialize(value, "Brightness")) return false; - - mCamera->GetCameraControl(FEATURE_GAMMA)->GetValue(&value); - if (!serialization->Serialize(value, "Gamma")) return false; - } - return true; + if (!mCamera) + { + return false; + } + + unsigned short value; + if (serialization->IsInput()) + { + if (!serialization->Serialize(value, "Gain")) + return false; + mCamera->GetCameraControl(FEATURE_GAIN)->SetAutoMode(false); + mCamera->GetCameraControl(FEATURE_GAIN)->SetValue(value); + + if (!serialization->Serialize(value, "AutoExposure")) + return false; + mCamera->GetCameraControl(FEATURE_AUTO_EXPOSURE)->SetAutoMode(false); + mCamera->GetCameraControl(FEATURE_AUTO_EXPOSURE)->SetValue(value); + + if (!serialization->Serialize(value, "Shutter")) + return false; + mCamera->GetCameraControl(FEATURE_SHUTTER)->SetAutoMode(false); + mCamera->GetCameraControl(FEATURE_SHUTTER)->SetValue(value); + + if (!serialization->Serialize(value, "Brightness")) + return false; + mCamera->GetCameraControl(FEATURE_BRIGHTNESS)->SetAutoMode(false); + mCamera->GetCameraControl(FEATURE_BRIGHTNESS)->SetValue(value); + + if (!serialization->Serialize(value, "Gamma")) + return false; + mCamera->GetCameraControl(FEATURE_GAMMA)->SetAutoMode(false); + mCamera->GetCameraControl(FEATURE_GAMMA)->SetValue(value); + } + else + { + mCamera->GetCameraControl(FEATURE_GAIN)->GetValue(&value); + if (!serialization->Serialize(value, "Gain")) + return false; + + mCamera->GetCameraControl(FEATURE_AUTO_EXPOSURE)->GetValue(&value); + if (!serialization->Serialize(value, "AutoExposure")) + return false; + + mCamera->GetCameraControl(FEATURE_SHUTTER)->GetValue(&value); + if (!serialization->Serialize(value, "Shutter")) + return false; + + mCamera->GetCameraControl(FEATURE_BRIGHTNESS)->GetValue(&value); + if (!serialization->Serialize(value, "Brightness")) + return false; + + mCamera->GetCameraControl(FEATURE_GAMMA)->GetValue(&value); + if (!serialization->Serialize(value, "Gamma")) + return false; + } + return true; } -CapturePluginCmu::CapturePluginCmu(const std::string &captureType) - : CapturePlugin(captureType) +CapturePluginCmu::CapturePluginCmu(const std::string& captureType) + : CapturePlugin(captureType) { } @@ -259,42 +315,45 @@ CapturePluginCmu::~CapturePluginCmu() CapturePlugin::CaptureDeviceVector CapturePluginCmu::enumerateDevices() { - CaptureDeviceVector devices; - - C1394Camera camera; - if (camera.CheckLink() != CAM_SUCCESS) { - return devices; - } - - int numberCameras = camera.GetNumberCameras(); - for (int i = 0; i < numberCameras; i++) { - camera.SelectCamera(i); - LARGE_INTEGER uniqueId; - camera.GetCameraUniqueID(&uniqueId); - std::stringstream convert; - convert << std::hex << uniqueId.HighPart << uniqueId.LowPart; - std::stringstream description; - char buffer[500]; - camera.GetCameraVendor(buffer, 500); - description << buffer << " "; - camera.GetCameraName(buffer, 500); - description << buffer; - CaptureDevice captureDevice(mCaptureType, convert.str(), description.str()); - devices.push_back(captureDevice); - } + CaptureDeviceVector devices; + C1394Camera camera; + if (camera.CheckLink() != CAM_SUCCESS) + { return devices; + } + + int numberCameras = camera.GetNumberCameras(); + for (int i = 0; i < numberCameras; i++) + { + 
camera.SelectCamera(i); + LARGE_INTEGER uniqueId; + camera.GetCameraUniqueID(&uniqueId); + std::stringstream convert; + convert << std::hex << uniqueId.HighPart << uniqueId.LowPart; + std::stringstream description; + char buffer[500]; + camera.GetCameraVendor(buffer, 500); + description << buffer << " "; + camera.GetCameraName(buffer, 500); + description << buffer; + CaptureDevice captureDevice(mCaptureType, convert.str(), description.str()); + devices.push_back(captureDevice); + } + + return devices; } -Capture *CapturePluginCmu::createCapture(const CaptureDevice captureDevice) +Capture* CapturePluginCmu::createCapture(const CaptureDevice captureDevice) { - return new CaptureCmu(captureDevice); + return new CaptureCmu(captureDevice); } -void registerPlugin(const std::string &captureType, alvar::CapturePlugin *&capturePlugin) +void registerPlugin(const std::string& captureType, + alvar::CapturePlugin*& capturePlugin) { - capturePlugin = new CapturePluginCmu(captureType); + capturePlugin = new CapturePluginCmu(captureType); } -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar diff --git a/ar_track_alvar/src/platform/capture_plugin_cmu/CapturePluginCmu.h b/ar_track_alvar/src/platform/capture_plugin_cmu/CapturePluginCmu.h index 841cfee..b3f68dc 100644 --- a/ar_track_alvar/src/platform/capture_plugin_cmu/CapturePluginCmu.h +++ b/ar_track_alvar/src/platform/capture_plugin_cmu/CapturePluginCmu.h @@ -31,13 +31,13 @@ */ #ifdef WIN32 - #ifdef ALVAR_Capture_Plugin_CMU_BUILD - #define ALVAR_CAPTURE_PLUGIN_CMU_EXPORT __declspec(dllexport) - #else - #define ALVAR_CAPTURE_PLUGIN_CMU_EXPORT __declspec(dllimport) - #endif +#ifdef ALVAR_Capture_Plugin_CMU_BUILD +#define ALVAR_CAPTURE_PLUGIN_CMU_EXPORT __declspec(dllexport) #else - #define ALVAR_CAPTURE_PLUGIN_CMU_EXPORT +#define ALVAR_CAPTURE_PLUGIN_CMU_EXPORT __declspec(dllimport) +#endif +#else +#define ALVAR_CAPTURE_PLUGIN_CMU_EXPORT #endif #include "Capture.h" @@ -45,66 +45,67 @@ #include "1394camera.h" -namespace alvar { - +namespace alvar +{ /** * \brief Dynamically loaded plugins namespace. */ -namespace plugins { - +namespace plugins +{ /** * \brief Implementation of Capture interface for Cmu plugin. */ -class ALVAR_CAPTURE_PLUGIN_CMU_EXPORT CaptureCmu - : public alvar::Capture +class ALVAR_CAPTURE_PLUGIN_CMU_EXPORT CaptureCmu : public alvar::Capture { public: - /** - * \brief Constructor. - * - * \param captureDevice Information of which camera to create. - */ - CaptureCmu(const CaptureDevice captureDevice); - /** - * \brief Destructor. - */ - ~CaptureCmu(); - bool start(); - void stop(); - IplImage *captureImage(); - bool showSettingsDialog(); - std::string SerializeId(); - bool Serialize(Serialization *serialization); + /** + * \brief Constructor. + * + * \param captureDevice Information of which camera to create. + */ + CaptureCmu(const CaptureDevice captureDevice); + /** + * \brief Destructor. + */ + ~CaptureCmu(); + bool start(); + void stop(); + IplImage* captureImage(); + bool showSettingsDialog(); + std::string SerializeId(); + bool Serialize(Serialization* serialization); + private: - C1394Camera *mCamera; - int mChannels; - IplImage *mReturnFrame; + C1394Camera* mCamera; + int mChannels; + IplImage* mReturnFrame; }; /** * \brief Implementation of CapturePlugin interface for Cmu plugin. */ class ALVAR_CAPTURE_PLUGIN_CMU_EXPORT CapturePluginCmu - : public alvar::CapturePlugin + : public alvar::CapturePlugin { public: - /** - * \brief Constructor. 
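// Reviewer sketch (not part of the patch; loadCapturePlugin is an
// illustrative name): each capture plugin exports the C entry point
// registerPlugin(), so a host can resolve it by name from a shared library at
// runtime; ALVAR's CaptureFactory presumably does something along these
// lines. A minimal POSIX loader with error handling trimmed:
#include <dlfcn.h>
#include <string>

typedef void (*RegisterPluginFunc)(const std::string&,
                                   alvar::CapturePlugin*&);

alvar::CapturePlugin* loadCapturePlugin(const char* library,
                                        const std::string& captureType)
{
  void* handle = dlopen(library, RTLD_LAZY);
  if (!handle)
    return NULL;
  RegisterPluginFunc registerPlugin =
      (RegisterPluginFunc)dlsym(handle, "registerPlugin");
  if (!registerPlugin)
    return NULL;
  alvar::CapturePlugin* plugin = NULL;
  registerPlugin(captureType, plugin);  // the library allocates the plugin
  return plugin;
}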
- * - * \param captureType A unique identifier for the capture plugin. - */ - CapturePluginCmu(const std::string &captureType); - /** - * \brief Destructor. - */ - ~CapturePluginCmu(); - CaptureDeviceVector enumerateDevices(); - Capture *createCapture(const CaptureDevice captureDevice); + /** + * \brief Constructor. + * + * \param captureType A unique identifier for the capture plugin. + */ + CapturePluginCmu(const std::string& captureType); + /** + * \brief Destructor. + */ + ~CapturePluginCmu(); + CaptureDeviceVector enumerateDevices(); + Capture* createCapture(const CaptureDevice captureDevice); }; -extern "C" ALVAR_CAPTURE_PLUGIN_CMU_EXPORT void registerPlugin(const std::string &captureType, alvar::CapturePlugin *&capturePlugin); +extern "C" ALVAR_CAPTURE_PLUGIN_CMU_EXPORT void registerPlugin( + const std::string& captureType, alvar::CapturePlugin*& capturePlugin); -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar #endif diff --git a/ar_track_alvar/src/platform/capture_plugin_dscapture/CapturePluginDSCapture.cpp b/ar_track_alvar/src/platform/capture_plugin_dscapture/CapturePluginDSCapture.cpp index 3d4daac..d8dd28c 100644 --- a/ar_track_alvar/src/platform/capture_plugin_dscapture/CapturePluginDSCapture.cpp +++ b/ar_track_alvar/src/platform/capture_plugin_dscapture/CapturePluginDSCapture.cpp @@ -27,19 +27,20 @@ using namespace std; -namespace alvar { -namespace plugins { - +namespace alvar +{ +namespace plugins +{ CaptureDSCapture::CaptureDSCapture(const CaptureDevice captureDevice) - : Capture(captureDevice) - , sampler(this) - , m_pDSCapture(NULL) - , m_nBpp(0) - , m_nVideo_x_res(-1) - , m_nVideo_y_res(-1) - , imgBuffer(NULL) - , imgBufferForCallback(NULL) - , mReturnFrame(NULL) + : Capture(captureDevice) + , sampler(this) + , m_pDSCapture(NULL) + , m_nBpp(0) + , m_nVideo_x_res(-1) + , m_nVideo_y_res(-1) + , imgBuffer(NULL) + , imgBufferForCallback(NULL) + , mReturnFrame(NULL) { InitializeCriticalSection(&crit); next_event = CreateEvent(NULL, FALSE, FALSE, NULL); @@ -49,9 +50,10 @@ CaptureDSCapture::CaptureDSCapture(const CaptureDevice captureDevice) CaptureDSCapture::~CaptureDSCapture() { stop(); - if (m_pDSCapture) { + if (m_pDSCapture) + { m_pDSCapture->Stop(); - delete m_pDSCapture; + delete m_pDSCapture; } delete imgBuffer; delete imgBufferForCallback; @@ -60,121 +62,135 @@ CaptureDSCapture::~CaptureDSCapture() bool CaptureDSCapture::start() { - if (m_pDSCapture) { - HRESULT hr = m_pDSCapture->Init(true, false, mCaptureDevice.id().c_str(), NULL); - - if(SUCCEEDED(hr)){ - // Set video grabber media type - AM_MEDIA_TYPE mt; - ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE)); - mt.majortype = MEDIATYPE_Video; - mt.subtype = MEDIASUBTYPE_RGB24; - hr = m_pDSCapture->SetVideoGrabberFormat(&mt); - } - - if(SUCCEEDED(hr)) - hr = m_pDSCapture->ShowVideoCaptureFormatDialog(); - - // We must connect filters before we can get connected media type from grabber(s) - if(SUCCEEDED(hr)) - hr = m_pDSCapture->Connect(); - - if(SUCCEEDED(hr)){ - AM_MEDIA_TYPE mt; - ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE)); - HRESULT hr = m_pDSCapture->GetVideoGrabberFormat(&mt); - if(SUCCEEDED(hr)){ - // Examine the format block. 
- if ((mt.formattype == FORMAT_VideoInfo) && - (mt.cbFormat >= sizeof(VIDEOINFOHEADER)) && - (mt.pbFormat != NULL) ) - { - if(mt.subtype == MEDIASUBTYPE_RGB24) - m_nBpp = 24; - else if(mt.subtype == MEDIASUBTYPE_RGB32) - m_nBpp = 32; - - VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)mt.pbFormat; - //cout << "Video capture: "<bmiHeader.biWidth<<"x"<bmiHeader.biHeight<<" "<bmiHeader.biBitCount<<" bpp "<bmiHeader.biWidth; - m_nVideo_y_res = pVih->bmiHeader.biHeight; - } - } - } - - if(FAILED(hr)){ - return false; - } - - m_pDSCapture->AddVideoCallback(&sampler); - - buffer_size = (int)(m_nVideo_x_res * m_nVideo_y_res * (m_nBpp/8.0)); - imgBuffer = new BYTE[buffer_size]; - imgBufferForCallback = new BYTE[buffer_size]; - mReturnFrame = cvCreateImageHeader(cvSize(m_nVideo_x_res, m_nVideo_y_res), IPL_DEPTH_8U,m_nBpp / 8); - mReturnFrame->imageData = (char*)imgBuffer; + if (m_pDSCapture) + { + HRESULT hr = + m_pDSCapture->Init(true, false, mCaptureDevice.id().c_str(), NULL); + + if (SUCCEEDED(hr)) + { + // Set video grabber media type + AM_MEDIA_TYPE mt; + ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE)); + mt.majortype = MEDIATYPE_Video; + mt.subtype = MEDIASUBTYPE_RGB24; + hr = m_pDSCapture->SetVideoGrabberFormat(&mt); } - m_pDSCapture->Start(); - mIsCapturing = true; - return true; + + if (SUCCEEDED(hr)) + hr = m_pDSCapture->ShowVideoCaptureFormatDialog(); + + // We must connect filters before we can get connected media type from + // grabber(s) + if (SUCCEEDED(hr)) + hr = m_pDSCapture->Connect(); + + if (SUCCEEDED(hr)) + { + AM_MEDIA_TYPE mt; + ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE)); + HRESULT hr = m_pDSCapture->GetVideoGrabberFormat(&mt); + if (SUCCEEDED(hr)) + { + // Examine the format block. + if ((mt.formattype == FORMAT_VideoInfo) && + (mt.cbFormat >= sizeof(VIDEOINFOHEADER)) && (mt.pbFormat != NULL)) + { + if (mt.subtype == MEDIASUBTYPE_RGB24) + m_nBpp = 24; + else if (mt.subtype == MEDIASUBTYPE_RGB32) + m_nBpp = 32; + + VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*)mt.pbFormat; + // cout << "Video capture: + // "<bmiHeader.biWidth<<"x"<bmiHeader.biHeight<<" + // "<bmiHeader.biBitCount<<" bpp "<bmiHeader.biWidth; + m_nVideo_y_res = pVih->bmiHeader.biHeight; + } + } + } + + if (FAILED(hr)) + { + return false; + } + + m_pDSCapture->AddVideoCallback(&sampler); + + buffer_size = (int)(m_nVideo_x_res * m_nVideo_y_res * (m_nBpp / 8.0)); + imgBuffer = new BYTE[buffer_size]; + imgBufferForCallback = new BYTE[buffer_size]; + mReturnFrame = cvCreateImageHeader(cvSize(m_nVideo_x_res, m_nVideo_y_res), + IPL_DEPTH_8U, m_nBpp / 8); + mReturnFrame->imageData = (char*)imgBuffer; + } + m_pDSCapture->Start(); + mIsCapturing = true; + return true; } void CaptureDSCapture::stop() { - if (isCapturing()) { - HRESULT hr = m_pDSCapture->Stop(); - mIsCapturing = false; - } + if (isCapturing()) + { + HRESULT hr = m_pDSCapture->Stop(); + mIsCapturing = false; + } } -IplImage *CaptureDSCapture::captureImage() +IplImage* CaptureDSCapture::captureImage() { - if (!isCapturing()) { - return NULL; - } + if (!isCapturing()) + { + return NULL; + } - IplImage *ret = NULL; - if (WaitForSingleObject(next_event, 1000) == WAIT_OBJECT_0) { - EnterCriticalSection(&crit); - ret = mReturnFrame; - ret->origin = 1; - memcpy(imgBuffer,imgBufferForCallback,buffer_size); - LeaveCriticalSection(&crit); - } + IplImage* ret = NULL; + if (WaitForSingleObject(next_event, 1000) == WAIT_OBJECT_0) + { + EnterCriticalSection(&crit); + ret = mReturnFrame; + ret->origin = 1; + memcpy(imgBuffer, imgBufferForCallback, buffer_size); + LeaveCriticalSection(&crit); + } 
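// Reviewer sketch (not part of the patch; FrameHandoff is an illustrative
// name): captureImage() here is the consumer half of a double-buffer handoff.
// OnVideoSample() below copies each frame into imgBufferForCallback under the
// critical section and signals next_event; the consumer waits up to one
// second, then copies into the buffer backing mReturnFrame while holding the
// same lock. The equivalent pattern with portable C++11 primitives:
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <mutex>
#include <vector>

struct FrameHandoff
{
  std::mutex m;
  std::condition_variable cv;
  bool ready = false;
  std::vector<unsigned char> back, front;

  void produce(const unsigned char* data, std::size_t len)  // callback thread
  {
    std::lock_guard<std::mutex> lock(m);
    back.assign(data, data + len);
    ready = true;
    cv.notify_one();
  }

  bool consume()  // capture thread; false on a one-second timeout
  {
    std::unique_lock<std::mutex> lock(m);
    if (!cv.wait_for(lock, std::chrono::seconds(1), [this] { return ready; }))
      return false;
    front = back;  // hand the frame to the caller's buffer
    ready = false;
    return true;
  }
};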
return ret; } bool CaptureDSCapture::showSettingsDialog() { - HRESULT hr = m_pDSCapture->ShowVideoCaptureFormatDialog(); - return SUCCEEDED(hr); + HRESULT hr = m_pDSCapture->ShowVideoCaptureFormatDialog(); + return SUCCEEDED(hr); } string CaptureDSCapture::SerializeId() { - return "CaptureDSCapture"; + return "CaptureDSCapture"; } -bool CaptureDSCapture::Serialize(Serialization *serialization) +bool CaptureDSCapture::Serialize(Serialization* serialization) { - return false; + return false; } -void CaptureDSCapture::OnVideoSample(BYTE* pBuffer, DWORD dwDataLen, REFERENCE_TIME t_start) +void CaptureDSCapture::OnVideoSample(BYTE* pBuffer, DWORD dwDataLen, + REFERENCE_TIME t_start) { - EnterCriticalSection(&crit); - if(pBuffer){ - if (dwDataLen <= buffer_size) { - memcpy(imgBufferForCallback,pBuffer,dwDataLen); - } - SetEvent(next_event); - } - LeaveCriticalSection(&crit); + EnterCriticalSection(&crit); + if (pBuffer) + { + if (dwDataLen <= buffer_size) + { + memcpy(imgBufferForCallback, pBuffer, dwDataLen); + } + SetEvent(next_event); + } + LeaveCriticalSection(&crit); } - -CapturePluginDSCapture::CapturePluginDSCapture(const string &captureType) - : CapturePlugin(captureType) +CapturePluginDSCapture::CapturePluginDSCapture(const string& captureType) + : CapturePlugin(captureType) { } @@ -184,26 +200,29 @@ CapturePluginDSCapture::~CapturePluginDSCapture() CapturePlugin::CaptureDeviceVector CapturePluginDSCapture::enumerateDevices() { - CaptureDeviceVector devices; - CDSCapture capture; - device_map* vids = capture.GetVideoCaptureDevices(); - for (device_map::iterator i = vids->begin(); i != vids->end(); ++i) { - CaptureDevice dev("dscapture", i->first, i->second); - devices.push_back(dev); - } - - return devices; + CaptureDeviceVector devices; + CDSCapture capture; + device_map* vids = capture.GetVideoCaptureDevices(); + for (device_map::iterator i = vids->begin(); i != vids->end(); ++i) + { + CaptureDevice dev("dscapture", i->first, i->second); + devices.push_back(dev); + } + + return devices; } -Capture *CapturePluginDSCapture::createCapture(const CaptureDevice captureDevice) +Capture* +CapturePluginDSCapture::createCapture(const CaptureDevice captureDevice) { - return new CaptureDSCapture(captureDevice); + return new CaptureDSCapture(captureDevice); } -void registerPlugin(const string &captureType, alvar::CapturePlugin *&capturePlugin) +void registerPlugin(const string& captureType, + alvar::CapturePlugin*& capturePlugin) { - capturePlugin = new CapturePluginDSCapture(captureType); + capturePlugin = new CapturePluginDSCapture(captureType); } -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar diff --git a/ar_track_alvar/src/platform/capture_plugin_dscapture/CapturePluginDSCapture.h b/ar_track_alvar/src/platform/capture_plugin_dscapture/CapturePluginDSCapture.h index e35697d..abd68c5 100644 --- a/ar_track_alvar/src/platform/capture_plugin_dscapture/CapturePluginDSCapture.h +++ b/ar_track_alvar/src/platform/capture_plugin_dscapture/CapturePluginDSCapture.h @@ -34,13 +34,13 @@ */ #ifdef WIN32 - #ifdef ALVAR_Capture_Plugin_DSCapture_BUILD - #define ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT __declspec(dllexport) - #else - #define ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT __declspec(dllimport) - #endif +#ifdef ALVAR_Capture_Plugin_DSCapture_BUILD +#define ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT __declspec(dllexport) #else - #define ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT +#define ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT __declspec(dllimport) +#endif +#else +#define 
ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT #endif #include "Capture.h" @@ -48,13 +48,13 @@ #include "dscapture.h" -namespace alvar { - +namespace alvar +{ /** * \brief Dynamically loaded plugins namespace. */ -namespace plugins { - +namespace plugins +{ /** * \brief Implementation of Capture interface for DSCapture plugin. * @@ -62,52 +62,57 @@ namespace plugins { * the build by default. */ class ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT CaptureDSCapture - : public alvar::Capture + : public alvar::Capture { - class VideoSampler : public IVideoCallback { - public: - CaptureDSCapture *parent; - VideoSampler(CaptureDSCapture *_parent) : parent(_parent) {} - void OnVideoSample(BYTE* pBuffer, DWORD dwDataLen, REFERENCE_TIME t_start) { - parent->OnVideoSample(pBuffer, dwDataLen, t_start); - } - bool operator=(const VideoSampler &vs) { return parent == vs.parent; } - } sampler; - friend class VideoSampler; - - void OnVideoSample(BYTE* pBuffer, DWORD dwDataLen, REFERENCE_TIME t_start); - + class VideoSampler : public IVideoCallback + { + public: + CaptureDSCapture* parent; + VideoSampler(CaptureDSCapture* _parent) : parent(_parent) + { + } + void OnVideoSample(BYTE* pBuffer, DWORD dwDataLen, REFERENCE_TIME t_start) + { + parent->OnVideoSample(pBuffer, dwDataLen, t_start); + } + bool operator=(const VideoSampler& vs) + { + return parent == vs.parent; + } + } sampler; + friend class VideoSampler; + + void OnVideoSample(BYTE* pBuffer, DWORD dwDataLen, REFERENCE_TIME t_start); + public: - - /** - * \brief Constructor. - * - * \param captureDevice Information of which camera to create. - */ - CaptureDSCapture(const CaptureDevice captureDevice); - /** - * \brief Destructor. - */ - ~CaptureDSCapture(); - bool start(); - void stop(); - IplImage *captureImage(); - bool showSettingsDialog(); - std::string SerializeId(); - bool Serialize(Serialization *serialization); - + /** + * \brief Constructor. + * + * \param captureDevice Information of which camera to create. + */ + CaptureDSCapture(const CaptureDevice captureDevice); + /** + * \brief Destructor. + */ + ~CaptureDSCapture(); + bool start(); + void stop(); + IplImage* captureImage(); + bool showSettingsDialog(); + std::string SerializeId(); + bool Serialize(Serialization* serialization); private: - CDSCapture *m_pDSCapture; - int m_nBpp; - int m_nVideo_x_res; - int m_nVideo_y_res; - BYTE *imgBuffer; - BYTE *imgBufferForCallback; - IplImage *mReturnFrame; - CRITICAL_SECTION crit; - unsigned int buffer_size; - HANDLE next_event; + CDSCapture* m_pDSCapture; + int m_nBpp; + int m_nVideo_x_res; + int m_nVideo_y_res; + BYTE* imgBuffer; + BYTE* imgBufferForCallback; + IplImage* mReturnFrame; + CRITICAL_SECTION crit; + unsigned int buffer_size; + HANDLE next_event; }; /** @@ -117,26 +122,27 @@ class ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT CaptureDSCapture * the build by default. */ class ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT CapturePluginDSCapture - : public alvar::CapturePlugin + : public alvar::CapturePlugin { public: - /** - * \brief Constructor. - * - * \param captureType A unique identifier for the capture plugin. - */ - CapturePluginDSCapture(const std::string &captureType); - /** - * \brief Destructor. - */ - ~CapturePluginDSCapture(); - CaptureDeviceVector enumerateDevices(); - Capture *createCapture(const CaptureDevice captureDevice); + /** + * \brief Constructor. + * + * \param captureType A unique identifier for the capture plugin. + */ + CapturePluginDSCapture(const std::string& captureType); + /** + * \brief Destructor. 
+ */ + ~CapturePluginDSCapture(); + CaptureDeviceVector enumerateDevices(); + Capture* createCapture(const CaptureDevice captureDevice); }; -extern "C" ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT void registerPlugin(const std::string &captureType, alvar::CapturePlugin *&capturePlugin); +extern "C" ALVAR_CAPTURE_PLUGIN_DSCAPTURE_EXPORT void registerPlugin( + const std::string& captureType, alvar::CapturePlugin*& capturePlugin); -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar #endif diff --git a/ar_track_alvar/src/platform/capture_plugin_file/CapturePluginFile.cpp b/ar_track_alvar/src/platform/capture_plugin_file/CapturePluginFile.cpp index 94f55c1..0728f47 100644 --- a/ar_track_alvar/src/platform/capture_plugin_file/CapturePluginFile.cpp +++ b/ar_track_alvar/src/platform/capture_plugin_file/CapturePluginFile.cpp @@ -23,87 +23,92 @@ #include "CapturePluginFile.h" -namespace alvar { -namespace plugins { - +namespace alvar +{ +namespace plugins +{ CaptureFile::CaptureFile(const CaptureDevice captureDevice) - : Capture(captureDevice) - , mVideoCapture() - , mMatrix() - , mImage() + : Capture(captureDevice), mVideoCapture(), mMatrix(), mImage() { } CaptureFile::~CaptureFile() { - stop(); + stop(); } bool CaptureFile::start() { - if (isCapturing()) { - return isCapturing(); - } + if (isCapturing()) + { + return isCapturing(); + } - mVideoCapture.open(captureDevice().id().c_str()); - if (mVideoCapture.isOpened()) { - mXResolution = (int)mVideoCapture.get(CV_CAP_PROP_FRAME_WIDTH); - mYResolution = (int)mVideoCapture.get(CV_CAP_PROP_FRAME_HEIGHT); - mIsCapturing = true; - } + mVideoCapture.open(captureDevice().id().c_str()); + if (mVideoCapture.isOpened()) + { + mXResolution = (int)mVideoCapture.get(CV_CAP_PROP_FRAME_WIDTH); + mYResolution = (int)mVideoCapture.get(CV_CAP_PROP_FRAME_HEIGHT); + mIsCapturing = true; + } - return isCapturing(); + return isCapturing(); } void CaptureFile::stop() { - if (isCapturing()) { - mVideoCapture.release(); - mIsCapturing = false; - } + if (isCapturing()) + { + mVideoCapture.release(); + mIsCapturing = false; + } } -IplImage *CaptureFile::captureImage() +IplImage* CaptureFile::captureImage() { - if (!isCapturing()) { - return NULL; + if (!isCapturing()) + { + return NULL; + } + + if (!mVideoCapture.grab()) + { + // try to restart the capturing when end of file is reached + mVideoCapture.release(); + mVideoCapture.open(captureDevice().id().c_str()); + if (!mVideoCapture.isOpened()) + { + mIsCapturing = false; + return NULL; } - - if (!mVideoCapture.grab()) { - // try to restart the capturing when end of file is reached - mVideoCapture.release(); - mVideoCapture.open(captureDevice().id().c_str()); - if (!mVideoCapture.isOpened()) { - mIsCapturing = false; - return NULL; - } - if (!mVideoCapture.grab()) { - return NULL; - } + if (!mVideoCapture.grab()) + { + return NULL; } - mVideoCapture.retrieve(mMatrix); - mImage = mMatrix; - return &mImage; + } + mVideoCapture.retrieve(mMatrix); + mImage = mMatrix; + return &mImage; } bool CaptureFile::showSettingsDialog() { - // TODO: implement this method - return false; + // TODO: implement this method + return false; } std::string CaptureFile::SerializeId() { - return "CaptureFile"; + return "CaptureFile"; } -bool CaptureFile::Serialize(Serialization *serialization) +bool CaptureFile::Serialize(Serialization* serialization) { - return false; + return false; } -CapturePluginFile::CapturePluginFile(const std::string &captureType) - : CapturePlugin(captureType) 
+CapturePluginFile::CapturePluginFile(const std::string& captureType) + : CapturePlugin(captureType) { } @@ -113,19 +118,20 @@ CapturePluginFile::~CapturePluginFile() CapturePlugin::CaptureDeviceVector CapturePluginFile::enumerateDevices() { - CaptureDeviceVector devices; - return devices; + CaptureDeviceVector devices; + return devices; } -Capture *CapturePluginFile::createCapture(const CaptureDevice captureDevice) +Capture* CapturePluginFile::createCapture(const CaptureDevice captureDevice) { - return new CaptureFile(captureDevice); + return new CaptureFile(captureDevice); } -void registerPlugin(const std::string &captureType, alvar::CapturePlugin *&capturePlugin) +void registerPlugin(const std::string& captureType, + alvar::CapturePlugin*& capturePlugin) { - capturePlugin = new CapturePluginFile(captureType); + capturePlugin = new CapturePluginFile(captureType); } -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar diff --git a/ar_track_alvar/src/platform/capture_plugin_file/CapturePluginFile.h b/ar_track_alvar/src/platform/capture_plugin_file/CapturePluginFile.h index 237afff..8314312 100644 --- a/ar_track_alvar/src/platform/capture_plugin_file/CapturePluginFile.h +++ b/ar_track_alvar/src/platform/capture_plugin_file/CapturePluginFile.h @@ -31,13 +31,13 @@ */ #ifdef WIN32 - #ifdef ALVAR_Capture_Plugin_File_BUILD - #define ALVAR_CAPTURE_PLUGIN_FILE_EXPORT __declspec(dllexport) - #else - #define ALVAR_CAPTURE_PLUGIN_FILE_EXPORT __declspec(dllimport) - #endif +#ifdef ALVAR_Capture_Plugin_File_BUILD +#define ALVAR_CAPTURE_PLUGIN_FILE_EXPORT __declspec(dllexport) #else - #define ALVAR_CAPTURE_PLUGIN_FILE_EXPORT +#define ALVAR_CAPTURE_PLUGIN_FILE_EXPORT __declspec(dllimport) +#endif +#else +#define ALVAR_CAPTURE_PLUGIN_FILE_EXPORT #endif #include "Capture.h" @@ -45,66 +45,67 @@ #include "highgui.h" -namespace alvar { - +namespace alvar +{ /** * \brief Dynamically loaded plugins namespace. */ -namespace plugins { - +namespace plugins +{ /** * \brief Implementation of Capture interface for File plugin. */ -class ALVAR_CAPTURE_PLUGIN_FILE_EXPORT CaptureFile - : public alvar::Capture +class ALVAR_CAPTURE_PLUGIN_FILE_EXPORT CaptureFile : public alvar::Capture { public: - /** - * \brief Constructor. - * - * \param captureDevice Information of which camera to create. - */ - CaptureFile(const CaptureDevice captureDevice); - /** - * \brief Destructor. - */ - ~CaptureFile(); - bool start(); - void stop(); - IplImage *captureImage(); - bool showSettingsDialog(); - std::string SerializeId(); - bool Serialize(Serialization *serialization); + /** + * \brief Constructor. + * + * \param captureDevice Information of which camera to create. + */ + CaptureFile(const CaptureDevice captureDevice); + /** + * \brief Destructor. + */ + ~CaptureFile(); + bool start(); + void stop(); + IplImage* captureImage(); + bool showSettingsDialog(); + std::string SerializeId(); + bool Serialize(Serialization* serialization); + private: - cv::VideoCapture mVideoCapture; - cv::Mat mMatrix; - IplImage mImage; + cv::VideoCapture mVideoCapture; + cv::Mat mMatrix; + IplImage mImage; }; /** * \brief Implementation of CapturePlugin interface for File plugin. */ class ALVAR_CAPTURE_PLUGIN_FILE_EXPORT CapturePluginFile - : public alvar::CapturePlugin + : public alvar::CapturePlugin { public: - /** - * \brief Constructor. - * - * \param captureType A unique identifier for the capture plugin. - */ - CapturePluginFile(const std::string &captureType); - /** - * \brief Destructor. 
- */ - ~CapturePluginFile(); - CaptureDeviceVector enumerateDevices(); - Capture *createCapture(const CaptureDevice captureDevice); + /** + * \brief Constructor. + * + * \param captureType A unique identifier for the capture plugin. + */ + CapturePluginFile(const std::string& captureType); + /** + * \brief Destructor. + */ + ~CapturePluginFile(); + CaptureDeviceVector enumerateDevices(); + Capture* createCapture(const CaptureDevice captureDevice); }; -extern "C" ALVAR_CAPTURE_PLUGIN_FILE_EXPORT void registerPlugin(const std::string &captureType, alvar::CapturePlugin *&capturePlugin); +extern "C" ALVAR_CAPTURE_PLUGIN_FILE_EXPORT void registerPlugin( + const std::string& captureType, alvar::CapturePlugin*& capturePlugin); -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar #endif diff --git a/ar_track_alvar/src/platform/capture_plugin_highgui/CapturePluginHighgui.cpp b/ar_track_alvar/src/platform/capture_plugin_highgui/CapturePluginHighgui.cpp index e624569..1522b24 100644 --- a/ar_track_alvar/src/platform/capture_plugin_highgui/CapturePluginHighgui.cpp +++ b/ar_track_alvar/src/platform/capture_plugin_highgui/CapturePluginHighgui.cpp @@ -25,89 +25,94 @@ #include -namespace alvar { -namespace plugins { - +namespace alvar +{ +namespace plugins +{ CaptureHighgui::CaptureHighgui(const CaptureDevice captureDevice) - : Capture(captureDevice) - , mVideoCapture() - , mMatrix() - , mImage() + : Capture(captureDevice), mVideoCapture(), mMatrix(), mImage() { } CaptureHighgui::~CaptureHighgui() { - stop(); + stop(); } -void CaptureHighgui::setResolution(const unsigned long xResolution, const unsigned long yResolution) +void CaptureHighgui::setResolution(const unsigned long xResolution, + const unsigned long yResolution) { - if (mVideoCapture.isOpened()) { - mVideoCapture.set(CV_CAP_PROP_FRAME_WIDTH, xResolution); - mVideoCapture.set(CV_CAP_PROP_FRAME_HEIGHT, yResolution); - mXResolution = (int)mVideoCapture.get(CV_CAP_PROP_FRAME_WIDTH); - mYResolution = (int)mVideoCapture.get(CV_CAP_PROP_FRAME_HEIGHT); - } + if (mVideoCapture.isOpened()) + { + mVideoCapture.set(CV_CAP_PROP_FRAME_WIDTH, xResolution); + mVideoCapture.set(CV_CAP_PROP_FRAME_HEIGHT, yResolution); + mXResolution = (int)mVideoCapture.get(CV_CAP_PROP_FRAME_WIDTH); + mYResolution = (int)mVideoCapture.get(CV_CAP_PROP_FRAME_HEIGHT); + } } bool CaptureHighgui::start() { - if (isCapturing()) { - return isCapturing(); - } + if (isCapturing()) + { + return isCapturing(); + } - std::istringstream convert(captureDevice().id()); - int id; - convert >> id; + std::istringstream convert(captureDevice().id()); + int id; + convert >> id; - mVideoCapture.open(id); - if (mVideoCapture.isOpened()) { - mIsCapturing = true; - } + mVideoCapture.open(id); + if (mVideoCapture.isOpened()) + { + mIsCapturing = true; + } - return isCapturing(); + return isCapturing(); } void CaptureHighgui::stop() { - if (isCapturing()) { - mVideoCapture.release(); - mIsCapturing = false; - } + if (isCapturing()) + { + mVideoCapture.release(); + mIsCapturing = false; + } } -IplImage *CaptureHighgui::captureImage() +IplImage* CaptureHighgui::captureImage() { - if (!isCapturing()) { - return NULL; - } - if (!mVideoCapture.grab()) { - return NULL; - } - mVideoCapture.retrieve(mMatrix); - mImage = mMatrix; - return &mImage; + if (!isCapturing()) + { + return NULL; + } + if (!mVideoCapture.grab()) + { + return NULL; + } + mVideoCapture.retrieve(mMatrix); + mImage = mMatrix; + return &mImage; } bool CaptureHighgui::showSettingsDialog() { - // TODO: 
implement this method - return false; + // TODO: implement this method + return false; } std::string CaptureHighgui::SerializeId() { - return "CaptureHighgui"; + return "CaptureHighgui"; } -bool CaptureHighgui::Serialize(Serialization *serialization) +bool CaptureHighgui::Serialize(Serialization* serialization) { - return false; + return false; } -CapturePluginHighgui::CapturePluginHighgui(const std::string &captureType) - : CapturePlugin(captureType) +CapturePluginHighgui::CapturePluginHighgui(const std::string& captureType) + : CapturePlugin(captureType) { } @@ -117,48 +122,54 @@ CapturePluginHighgui::~CapturePluginHighgui() CapturePlugin::CaptureDeviceVector CapturePluginHighgui::enumerateDevices() { - CaptureDeviceVector devices; - - bool loop = true; - int id = 0; - cv::VideoCapture videoCapture; - - while (loop) { - std::stringstream convert; - convert << id; - CaptureDevice captureDevice(mCaptureType, convert.str()); - - videoCapture.open(id); - if (videoCapture.isOpened()) { - int width = (int)videoCapture.get(CV_CAP_PROP_FRAME_WIDTH); - int height = (int)videoCapture.get(CV_CAP_PROP_FRAME_HEIGHT); - if (width > 0 && height > 0) { - devices.push_back(captureDevice); - } - else { - loop = false; - } - } - else { - loop = false; - } - - id++; + CaptureDeviceVector devices; + + bool loop = true; + int id = 0; + cv::VideoCapture videoCapture; + + while (loop) + { + std::stringstream convert; + convert << id; + CaptureDevice captureDevice(mCaptureType, convert.str()); + + videoCapture.open(id); + if (videoCapture.isOpened()) + { + int width = (int)videoCapture.get(CV_CAP_PROP_FRAME_WIDTH); + int height = (int)videoCapture.get(CV_CAP_PROP_FRAME_HEIGHT); + if (width > 0 && height > 0) + { + devices.push_back(captureDevice); + } + else + { + loop = false; + } + } + else + { + loop = false; } - videoCapture.release(); - return devices; + id++; + } + videoCapture.release(); + + return devices; } -Capture *CapturePluginHighgui::createCapture(const CaptureDevice captureDevice) +Capture* CapturePluginHighgui::createCapture(const CaptureDevice captureDevice) { - return new CaptureHighgui(captureDevice); + return new CaptureHighgui(captureDevice); } -void registerPlugin(const std::string &captureType, alvar::CapturePlugin *&capturePlugin) +void registerPlugin(const std::string& captureType, + alvar::CapturePlugin*& capturePlugin) { - capturePlugin = new CapturePluginHighgui(captureType); + capturePlugin = new CapturePluginHighgui(captureType); } -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar diff --git a/ar_track_alvar/src/platform/capture_plugin_highgui/CapturePluginHighgui.h b/ar_track_alvar/src/platform/capture_plugin_highgui/CapturePluginHighgui.h index b215036..721204d 100644 --- a/ar_track_alvar/src/platform/capture_plugin_highgui/CapturePluginHighgui.h +++ b/ar_track_alvar/src/platform/capture_plugin_highgui/CapturePluginHighgui.h @@ -31,13 +31,13 @@ */ #ifdef WIN32 - #ifdef ALVAR_Capture_Plugin_Highgui_BUILD - #define ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT __declspec(dllexport) - #else - #define ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT __declspec(dllimport) - #endif +#ifdef ALVAR_Capture_Plugin_Highgui_BUILD +#define ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT __declspec(dllexport) #else - #define ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT +#define ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT __declspec(dllimport) +#endif +#else +#define ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT #endif #include "Capture.h" @@ -45,67 +45,69 @@ #include "highgui.h" -namespace alvar { - +namespace 
alvar +{ /** * \brief Dynamically loaded plugins namespace. */ -namespace plugins { - +namespace plugins +{ /** * \brief Implementation of Capture interface for Highgui plugin. */ -class ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT CaptureHighgui - : public alvar::Capture +class ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT CaptureHighgui : public alvar::Capture { public: - /** - * \brief Constructor. - * - * \param captureDevice Information of which camera to create. - */ - CaptureHighgui(const CaptureDevice captureDevice); - /** - * \brief Destructor. - */ - ~CaptureHighgui(); - void setResolution(const unsigned long xResolution, const unsigned long yResolution); - bool start(); - void stop(); - IplImage *captureImage(); - bool showSettingsDialog(); - std::string SerializeId(); - bool Serialize(Serialization *serialization); + /** + * \brief Constructor. + * + * \param captureDevice Information of which camera to create. + */ + CaptureHighgui(const CaptureDevice captureDevice); + /** + * \brief Destructor. + */ + ~CaptureHighgui(); + void setResolution(const unsigned long xResolution, + const unsigned long yResolution); + bool start(); + void stop(); + IplImage* captureImage(); + bool showSettingsDialog(); + std::string SerializeId(); + bool Serialize(Serialization* serialization); + private: - cv::VideoCapture mVideoCapture; - cv::Mat mMatrix; - IplImage mImage; + cv::VideoCapture mVideoCapture; + cv::Mat mMatrix; + IplImage mImage; }; /** * \brief Implementation of CapturePlugin interface for Highgui plugin. */ class ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT CapturePluginHighgui - : public alvar::CapturePlugin + : public alvar::CapturePlugin { public: - /** - * \brief Constructor. - * - * \param captureType A unique identifier for the capture plugin. - */ - CapturePluginHighgui(const std::string &captureType); - /** - * \brief Destructor. - */ - ~CapturePluginHighgui(); - CaptureDeviceVector enumerateDevices(); - Capture *createCapture(const CaptureDevice captureDevice); + /** + * \brief Constructor. + * + * \param captureType A unique identifier for the capture plugin. + */ + CapturePluginHighgui(const std::string& captureType); + /** + * \brief Destructor. 
+ */ + ~CapturePluginHighgui(); + CaptureDeviceVector enumerateDevices(); + Capture* createCapture(const CaptureDevice captureDevice); }; -extern "C" ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT void registerPlugin(const std::string &captureType, alvar::CapturePlugin *&capturePlugin); +extern "C" ALVAR_CAPTURE_PLUGIN_HIGHGUI_EXPORT void registerPlugin( + const std::string& captureType, alvar::CapturePlugin*& capturePlugin); -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar #endif diff --git a/ar_track_alvar/src/platform/capture_plugin_ptgrey/CapturePluginPtgrey.cpp b/ar_track_alvar/src/platform/capture_plugin_ptgrey/CapturePluginPtgrey.cpp index 606515a..8a67ee0 100644 --- a/ar_track_alvar/src/platform/capture_plugin_ptgrey/CapturePluginPtgrey.cpp +++ b/ar_track_alvar/src/platform/capture_plugin_ptgrey/CapturePluginPtgrey.cpp @@ -31,148 +31,172 @@ using namespace std; -namespace alvar { -namespace plugins { - +namespace alvar +{ +namespace plugins +{ CapturePtgrey::CapturePtgrey(const CaptureDevice captureDevice) - : Capture(captureDevice) - , mCamera(new FlyCapture2::Camera) - , mImage(new FlyCapture2::Image) - , mChannels(-1) - , mReturnFrame(NULL) + : Capture(captureDevice) + , mCamera(new FlyCapture2::Camera) + , mImage(new FlyCapture2::Image) + , mChannels(-1) + , mReturnFrame(NULL) { } CapturePtgrey::~CapturePtgrey() { - stop(); - delete mCamera; - delete mImage; + stop(); + delete mCamera; + delete mImage; } bool CapturePtgrey::start() { - if (isCapturing()) { - return isCapturing(); - } - - stringstream id(captureDevice().id()); - id.setf(ios_base::hex, ios_base::basefield); - id >> mGUID.value[0]; id.get(); - id >> mGUID.value[1]; id.get(); - id >> mGUID.value[2]; id.get(); - id >> mGUID.value[3]; - - if (mCamera->Connect(&mGUID) != FlyCapture2::PGRERROR_OK) { - return false; - } - - FlyCapture2::VideoMode videoMode; - FlyCapture2::FrameRate frameRate; - if (mCamera->GetVideoModeAndFrameRate (&videoMode, &frameRate) != FlyCapture2::PGRERROR_OK) { - return false; - } - - if (videoMode == FlyCapture2::VIDEOMODE_640x480RGB) { - mChannels = 3; - mXResolution = 640; - mYResolution = 480; - - } else if (videoMode == FlyCapture2::VIDEOMODE_640x480Y8) { - mChannels = 1; - mXResolution = 640; - mYResolution = 480; - - } else if (videoMode == FlyCapture2::VIDEOMODE_800x600RGB) { - mChannels = 3; - mXResolution = 800; - mYResolution = 600; - - } else if (videoMode == FlyCapture2::VIDEOMODE_800x600Y8) { - mChannels = 1; - mXResolution = 800; - mYResolution = 600; - - } else if (videoMode == FlyCapture2::VIDEOMODE_1024x768RGB) { - mChannels = 3; - mXResolution = 1024; - mYResolution = 768; - - } else if (videoMode == FlyCapture2::VIDEOMODE_1024x768Y8) { - mChannels = 1; - mXResolution = 1024; - mYResolution = 768; - - } else if (videoMode == FlyCapture2::VIDEOMODE_1280x960RGB) { - mChannels = 3; - mXResolution = 1280; - mYResolution = 960; - - } else if (videoMode == FlyCapture2::VIDEOMODE_1280x960Y8) { - mChannels = 1; - mXResolution = 1280; - mYResolution = 960; - - } else if (videoMode == FlyCapture2::VIDEOMODE_1600x1200RGB) { - mChannels = 3; - mXResolution = 1600; - mYResolution = 1200; - - } else if (videoMode == FlyCapture2::VIDEOMODE_1600x1200Y8) { - mChannels = 1; - mXResolution = 1600; - mYResolution = 1200; - - } else { - return false; - } - - mReturnFrame = cvCreateImage(cvSize(mXResolution, mYResolution), IPL_DEPTH_8U, mChannels); - if (mCamera->StartCapture() != FlyCapture2::PGRERROR_OK) { - return false; - } - mIsCapturing = true; + if 
(isCapturing()) + { return isCapturing(); + } + + stringstream id(captureDevice().id()); + id.setf(ios_base::hex, ios_base::basefield); + id >> mGUID.value[0]; + id.get(); + id >> mGUID.value[1]; + id.get(); + id >> mGUID.value[2]; + id.get(); + id >> mGUID.value[3]; + + if (mCamera->Connect(&mGUID) != FlyCapture2::PGRERROR_OK) + { + return false; + } + + FlyCapture2::VideoMode videoMode; + FlyCapture2::FrameRate frameRate; + if (mCamera->GetVideoModeAndFrameRate(&videoMode, &frameRate) != + FlyCapture2::PGRERROR_OK) + { + return false; + } + + if (videoMode == FlyCapture2::VIDEOMODE_640x480RGB) + { + mChannels = 3; + mXResolution = 640; + mYResolution = 480; + } + else if (videoMode == FlyCapture2::VIDEOMODE_640x480Y8) + { + mChannels = 1; + mXResolution = 640; + mYResolution = 480; + } + else if (videoMode == FlyCapture2::VIDEOMODE_800x600RGB) + { + mChannels = 3; + mXResolution = 800; + mYResolution = 600; + } + else if (videoMode == FlyCapture2::VIDEOMODE_800x600Y8) + { + mChannels = 1; + mXResolution = 800; + mYResolution = 600; + } + else if (videoMode == FlyCapture2::VIDEOMODE_1024x768RGB) + { + mChannels = 3; + mXResolution = 1024; + mYResolution = 768; + } + else if (videoMode == FlyCapture2::VIDEOMODE_1024x768Y8) + { + mChannels = 1; + mXResolution = 1024; + mYResolution = 768; + } + else if (videoMode == FlyCapture2::VIDEOMODE_1280x960RGB) + { + mChannels = 3; + mXResolution = 1280; + mYResolution = 960; + } + else if (videoMode == FlyCapture2::VIDEOMODE_1280x960Y8) + { + mChannels = 1; + mXResolution = 1280; + mYResolution = 960; + } + else if (videoMode == FlyCapture2::VIDEOMODE_1600x1200RGB) + { + mChannels = 3; + mXResolution = 1600; + mYResolution = 1200; + } + else if (videoMode == FlyCapture2::VIDEOMODE_1600x1200Y8) + { + mChannels = 1; + mXResolution = 1600; + mYResolution = 1200; + } + else + { + return false; + } + + mReturnFrame = cvCreateImage(cvSize(mXResolution, mYResolution), IPL_DEPTH_8U, + mChannels); + if (mCamera->StartCapture() != FlyCapture2::PGRERROR_OK) + { + return false; + } + mIsCapturing = true; + return isCapturing(); } void CapturePtgrey::stop() { - if (isCapturing()) { - mCamera->StopCapture(); - cvReleaseImage(&mReturnFrame); - } + if (isCapturing()) + { + mCamera->StopCapture(); + cvReleaseImage(&mReturnFrame); + } } -IplImage *CapturePtgrey::captureImage() +IplImage* CapturePtgrey::captureImage() { - if (!isCapturing()) { - return NULL; - } - - if (mCamera->RetrieveBuffer(mImage) == FlyCapture2::PGRERROR_OK) { - unsigned long length = mReturnFrame->widthStep * mYResolution; - memcpy(mReturnFrame->imageData, mImage->GetData(), length); - } - return mReturnFrame; + if (!isCapturing()) + { + return NULL; + } + + if (mCamera->RetrieveBuffer(mImage) == FlyCapture2::PGRERROR_OK) + { + unsigned long length = mReturnFrame->widthStep * mYResolution; + memcpy(mReturnFrame->imageData, mImage->GetData(), length); + } + return mReturnFrame; } bool CapturePtgrey::showSettingsDialog() { - return false; + return false; } string CapturePtgrey::SerializeId() { - return "CapturePtgrey"; + return "CapturePtgrey"; } -bool CapturePtgrey::Serialize(Serialization *serialization) +bool CapturePtgrey::Serialize(Serialization* serialization) { - return false; + return false; } -CapturePluginPtgrey::CapturePluginPtgrey(const string &captureType) - : CapturePlugin(captureType) +CapturePluginPtgrey::CapturePluginPtgrey(const string& captureType) + : CapturePlugin(captureType) { } @@ -182,47 +206,52 @@ CapturePluginPtgrey::~CapturePluginPtgrey() 
CapturePlugin::CaptureDeviceVector CapturePluginPtgrey::enumerateDevices() { - CaptureDeviceVector devices; - - FlyCapture2::BusManager bus; - if (bus.RescanBus() != FlyCapture2::PGRERROR_OK) { - return devices; - } - - unsigned int numberCameras = 0; - bus.GetNumOfCameras(&numberCameras); - - for (unsigned int i = 0; i < numberCameras; i++) { - FlyCapture2::PGRGuid guid; - bus.GetCameraFromIndex(i, &guid); - stringstream convert; - convert << hex << guid.value[0]; - convert << "_" << hex << guid.value[1]; - convert << "_" << hex << guid.value[2]; - convert << "_" << hex << guid.value[3]; - stringstream description; - FlyCapture2::Camera camera; - if (camera.Connect(&guid) != FlyCapture2::PGRERROR_OK) continue; - FlyCapture2::CameraInfo info; - if (camera.GetCameraInfo (&info) != FlyCapture2::PGRERROR_OK) continue; - description << info.vendorName << " "; - description << info.modelName; - CaptureDevice captureDevice(mCaptureType, convert.str(), description.str()); - devices.push_back(captureDevice); - } + CaptureDeviceVector devices; + FlyCapture2::BusManager bus; + if (bus.RescanBus() != FlyCapture2::PGRERROR_OK) + { return devices; + } + + unsigned int numberCameras = 0; + bus.GetNumOfCameras(&numberCameras); + + for (unsigned int i = 0; i < numberCameras; i++) + { + FlyCapture2::PGRGuid guid; + bus.GetCameraFromIndex(i, &guid); + stringstream convert; + convert << hex << guid.value[0]; + convert << "_" << hex << guid.value[1]; + convert << "_" << hex << guid.value[2]; + convert << "_" << hex << guid.value[3]; + stringstream description; + FlyCapture2::Camera camera; + if (camera.Connect(&guid) != FlyCapture2::PGRERROR_OK) + continue; + FlyCapture2::CameraInfo info; + if (camera.GetCameraInfo(&info) != FlyCapture2::PGRERROR_OK) + continue; + description << info.vendorName << " "; + description << info.modelName; + CaptureDevice captureDevice(mCaptureType, convert.str(), description.str()); + devices.push_back(captureDevice); + } + + return devices; } -Capture *CapturePluginPtgrey::createCapture(const CaptureDevice captureDevice) +Capture* CapturePluginPtgrey::createCapture(const CaptureDevice captureDevice) { - return new CapturePtgrey(captureDevice); + return new CapturePtgrey(captureDevice); } -void registerPlugin(const string &captureType, alvar::CapturePlugin *&capturePlugin) +void registerPlugin(const string& captureType, + alvar::CapturePlugin*& capturePlugin) { - capturePlugin = new CapturePluginPtgrey(captureType); + capturePlugin = new CapturePluginPtgrey(captureType); } -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar diff --git a/ar_track_alvar/src/platform/capture_plugin_ptgrey/CapturePluginPtgrey.h b/ar_track_alvar/src/platform/capture_plugin_ptgrey/CapturePluginPtgrey.h index 17a583a..347a34f 100644 --- a/ar_track_alvar/src/platform/capture_plugin_ptgrey/CapturePluginPtgrey.h +++ b/ar_track_alvar/src/platform/capture_plugin_ptgrey/CapturePluginPtgrey.h @@ -34,13 +34,13 @@ */ #ifdef WIN32 - #ifdef ALVAR_Capture_Plugin_Ptgrey_BUILD - #define ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT __declspec(dllexport) - #else - #define ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT __declspec(dllimport) - #endif +#ifdef ALVAR_Capture_Plugin_Ptgrey_BUILD +#define ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT __declspec(dllexport) #else - #define ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT +#define ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT __declspec(dllimport) +#endif +#else +#define ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT #endif #include "Capture.h" @@ -49,50 +49,51 @@ #include 
"FlyCapture2Defs.h" // Forward declaration for PTGrey specific classes. -namespace FlyCapture2 { - class Camera; - class Image; -} +namespace FlyCapture2 +{ +class Camera; +class Image; +} // namespace FlyCapture2 -namespace alvar { - +namespace alvar +{ /** * \brief Dynamically loaded plugins namespace. */ -namespace plugins { - +namespace plugins +{ /** * \brief Implementation of Capture interface for Ptgrey plugin. * * \note The PointGrey plugin is currently experimental and not included in * the build by default. */ -class ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT CapturePtgrey - : public alvar::Capture +class ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT CapturePtgrey : public alvar::Capture { public: - /** - * \brief Constructor. - * - * \param captureDevice Information of which camera to create. - */ - CapturePtgrey(const CaptureDevice captureDevice); - /** - * \brief Destructor. - */ - ~CapturePtgrey(); - bool start(); - void stop(); - IplImage *captureImage(); - bool showSettingsDialog(); - std::string SerializeId(); - bool Serialize(Serialization *serialization); + /** + * \brief Constructor. + * + * \param captureDevice Information of which camera to create. + */ + CapturePtgrey(const CaptureDevice captureDevice); + /** + * \brief Destructor. + */ + ~CapturePtgrey(); + bool start(); + void stop(); + IplImage* captureImage(); + bool showSettingsDialog(); + std::string SerializeId(); + bool Serialize(Serialization* serialization); + private: - FlyCapture2::Camera *mCamera; - FlyCapture2::Image *mImage; - FlyCapture2::PGRGuid mGUID; - int mChannels; - IplImage *mReturnFrame; + FlyCapture2::Camera* mCamera; + FlyCapture2::Image* mImage; + FlyCapture2::PGRGuid mGUID; + int mChannels; + IplImage* mReturnFrame; }; /** @@ -102,26 +103,27 @@ class ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT CapturePtgrey * the build by default. */ class ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT CapturePluginPtgrey - : public alvar::CapturePlugin + : public alvar::CapturePlugin { public: - /** - * \brief Constructor. - * - * \param captureType A unique identifier for the capture plugin. - */ - CapturePluginPtgrey(const std::string &captureType); - /** - * \brief Destructor. - */ - ~CapturePluginPtgrey(); - CaptureDeviceVector enumerateDevices(); - Capture *createCapture(const CaptureDevice captureDevice); + /** + * \brief Constructor. + * + * \param captureType A unique identifier for the capture plugin. + */ + CapturePluginPtgrey(const std::string& captureType); + /** + * \brief Destructor. 
+ */ + ~CapturePluginPtgrey(); + CaptureDeviceVector enumerateDevices(); + Capture* createCapture(const CaptureDevice captureDevice); }; -extern "C" ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT void registerPlugin(const std::string &captureType, alvar::CapturePlugin *&capturePlugin); +extern "C" ALVAR_CAPTURE_PLUGIN_PTGREY_EXPORT void registerPlugin( + const std::string& captureType, alvar::CapturePlugin*& capturePlugin); -} // namespace plugins -} // namespace alvar +} // namespace plugins +} // namespace alvar #endif diff --git a/ar_track_alvar/test/test_ar_legacy.py b/ar_track_alvar/test/test_ar_legacy.py index 3cb7382..b94a256 100644 --- a/ar_track_alvar/test/test_ar_legacy.py +++ b/ar_track_alvar/test/test_ar_legacy.py @@ -38,19 +38,19 @@ def generate_test_description(): # Bag Node launch.actions.ExecuteProcess( - cmd=['ros2', 'bag', 'play', '-s',"rosbag_v2",bag_name], + cmd=['ros2', 'bag', 'play', '--loop','-s',"rosbag_v2",bag_name], output='screen' ), Node( package='ar_track_alvar', - executable='individualMarkersNoKinect', + executable='individualMarkers',#NoKinect', name='individual_markers', remappings=[ ("camera_image", cam_image_topic), ("camera_info",cam_info_topic) ], - arguments=[marker_size, max_new_marker_error, max_track_error, cam_image_topic, cam_info_topic, output_frame], + arguments=[marker_size,max_new_marker_error, max_track_error,cam_image_topic, cam_info_topic, output_frame, max_frequency, marker_resolution, marker_margin], output='screen' ), launch_testing.actions.ReadyToTest(), diff --git a/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py b/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py index d459acf..f7eb52a 100644 --- a/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py +++ b/ar_track_alvar/test/test_ar_track_alvar_individual_markers.py @@ -47,7 +47,7 @@ def generate_test_description(): ("camera_image", cam_image_topic), ("camera_info",cam_info_topic) ], - arguments=[marker_size, max_new_marker_error, max_track_error, cam_image_topic, cam_info_topic, output_frame], + arguments=[marker_size,max_new_marker_error, max_track_error,cam_image_topic, cam_info_topic, output_frame, max_frequency, marker_resolution, marker_margin], output='screen' ), launch_testing.actions.ReadyToTest(), From 25718cfeb3fca5ff19f66b9048cfa1cedc0a224c Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Tue, 3 Aug 2021 17:22:29 -0500 Subject: [PATCH 19/27] Unit tests and k matrix fix --- .../nodes/IndividualMarkersNoKinect.cpp | 212 ++++++++++-------- ar_track_alvar/src/Camera.cpp | 76 ++----- ar_track_alvar/src/Marker.cpp | 26 +-- ar_track_alvar/src/MarkerDetector.cpp | 22 +- ar_track_alvar/test/test_ar_legacy.py | 42 ++-- 5 files changed, 180 insertions(+), 198 deletions(-) diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index c470b4f..555a8c4 100644 --- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -42,12 +42,16 @@ #include #include #include +#include "tf2/convert.h" +#include "tf2_geometry_msgs/tf2_geometry_msgs.h" +#include "tf2/LinearMath/Transform.h" #include "tf2_ros/buffer.h" -#include "tf2_ros/create_timer_ros.h" +#include "tf2_ros/message_filter.h" +#include "tf2_ros/transform_broadcaster.h" #include "tf2_ros/transform_listener.h" -#include -#include +#include "tf2_ros/create_timer_ros.h" #include +#include using namespace alvar; using namespace std; @@ -67,18 +71,25 @@ class IndividualMarkersNoKinect : public 
rclcpp::Node rclcpp::Subscription::SharedPtr enable_sub_; rclcpp::Subscription::SharedPtr cam_sub_; + tf2::TimePoint prev_stamp_; + + // image_transport::Subscriber cam_sub_; + rclcpp::Publisher::SharedPtr arMarkerPub_; rclcpp::Publisher::SharedPtr rvizMarkerPub_; rclcpp::Subscription::SharedPtr info_sub_; - tf2_ros::TransformBroadcaster tf_broadcaster_; - tf2_ros::TransformListener tf_listener_; - tf2_ros::Buffer tf2_; + + std::shared_ptr tf2_; + std::shared_ptr tf_listener_; + std::shared_ptr tf_broadcaster_; + + MarkerDetector marker_detector; bool enableSwitched = false; bool enabled = true; - double max_frequency; + double max_frequency=8.0; double marker_size; double max_new_marker_error; double max_track_error; @@ -91,8 +102,14 @@ class IndividualMarkersNoKinect : public rclcpp::Node public: - IndividualMarkersNoKinect(int argc, char* argv[]) : Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this)//, it_(this) + IndividualMarkersNoKinect(int argc, char* argv[]) : Node("marker_detect") //, tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this)//, it_(this) { + + rclcpp::Clock::SharedPtr clock = this->get_clock(); + tf2_ = std::make_shared(clock); + tf_listener_ = std::make_shared(*tf2_); + tf_broadcaster_ = std::make_shared(*this); + if(argc > 1) { RCLCPP_WARN(this->get_logger(), "Command line arguments are deprecated. Consider using ROS parameters and remappings."); @@ -115,6 +132,8 @@ class IndividualMarkersNoKinect : public rclcpp::Node cam_info_topic = argv[5]; output_frame = argv[6]; + RCLCPP_INFO(this->get_logger(),"marker_size: %f, max_new_marker_error: %f, max_track_error: %f",marker_size,max_new_marker_error,max_track_error); + if (argc > 7) { max_frequency = atof(argv[7]); @@ -166,6 +185,8 @@ class IndividualMarkersNoKinect : public rclcpp::Node marker_detector.SetMarkerSize(marker_size, marker_resolution, marker_margin); cam = new Camera(); + + prev_stamp_ = tf2::get_now(); //Give tf a chance to catch up before the camera callback starts asking for transforms // It will also reconfigure parameters for the first time, setting the default values @@ -181,8 +202,6 @@ class IndividualMarkersNoKinect : public rclcpp::Node enable_sub_ = this->create_subscription("enable_detection", 1, std::bind(&IndividualMarkersNoKinect::enableCallback, this, std::placeholders::_1)); - - RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); info_sub_ = this->create_subscription(cam_info_topic, 1, std::bind(&IndividualMarkersNoKinect::InfoCallback, this, std::placeholders::_1)); @@ -200,13 +219,16 @@ class IndividualMarkersNoKinect : public rclcpp::Node { cam->SetCameraInfo(cam_info); cam->getCamInfo_ = true; - //sub_.reset(); + // info_sub_.reset(); } } void getCapCallback (const sensor_msgs::msg::Image::SharedPtr image_msg) + //void getCapCallback(const sensor_msgs::msg::Image::ConstSharedPtr& image_msg) { + std::string tf_error; + //If we've already gotten the cam info, then go ahead if(cam->getCamInfo_){ try @@ -214,11 +236,12 @@ class IndividualMarkersNoKinect : public rclcpp::Node geometry_msgs::msg::TransformStamped CamToOutput; try { - tf2_.canTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp); - CamToOutput = tf2_.lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp); + tf2::TimePoint tf2_time = tf2_ros::fromMsg(image_msg->header.stamp); + CamToOutput = tf2_->lookupTransform(output_frame, image_msg->header.frame_id,tf2_time,tf2_time - prev_stamp_); } catch 
(tf2::TransformException ex){ RCLCPP_ERROR(this->get_logger(), "%s",ex.what()); + tf_error.clear(); } //Convert the image @@ -236,14 +259,10 @@ class IndividualMarkersNoKinect : public rclcpp::Node true); arPoseMarkers_.markers.clear(); - - //marker_detector.Detect(ipl_image, cam, true, true, max_new_marker_error, max_track_error, CVSEQ, true); - arPoseMarkers_.markers.clear (); - - for (size_t i=0; isize(); i++) - { + for (size_t i = 0; i < marker_detector.markers->size(); i++) + { // Get the pose relative to the camera - int id = (*(marker_detector.markers))[i].GetId(); + int id = (*(marker_detector.markers))[i].GetId(); Pose p = (*(marker_detector.markers))[i].pose; double px = p.translation[0] / 100.0; double py = p.translation[1] / 100.0; @@ -255,7 +274,6 @@ class IndividualMarkersNoKinect : public rclcpp::Node double qy = quat.at(2,0); //p.quaternion[2]; double qz = quat.at(3,0); //p.quaternion[3]; double qw = quat.at(0,0); //p.quaternion[0]; - tf2::Quaternion rotation (qx,qy,qz,qw); tf2::Vector3 origin (px,py,pz); tf2::Transform t (rotation, origin); @@ -296,84 +314,84 @@ class IndividualMarkersNoKinect : public rclcpp::Node camToMarker.transform.translation = trans; camToMarker.transform.rotation = rot; - tf_broadcaster_.sendTransform(camToMarker); + tf_broadcaster_->sendTransform(camToMarker); - //Create the rviz visualization messages - rvizMarker_.pose.position.x = markerPose.getOrigin().getX(); - rvizMarker_.pose.position.y = markerPose.getOrigin().getY(); - rvizMarker_.pose.position.z = markerPose.getOrigin().getZ(); - rvizMarker_.pose.orientation.x = markerPose.getRotation().getX(); - rvizMarker_.pose.orientation.y = markerPose.getRotation().getY(); - rvizMarker_.pose.orientation.z = markerPose.getRotation().getZ(); - rvizMarker_.pose.orientation.w = markerPose.getRotation().getW(); - rvizMarker_.header.frame_id = image_msg->header.frame_id; - rvizMarker_.header.stamp = image_msg->header.stamp; - rvizMarker_.id = id; - - rvizMarker_.scale.x = 1.0 * marker_size/100.0; - rvizMarker_.scale.y = 1.0 * marker_size/100.0; - rvizMarker_.scale.z = 0.2 * marker_size/100.0; - rvizMarker_.ns = "basic_shapes"; - rvizMarker_.type = visualization_msgs::msg::Marker::CUBE; - rvizMarker_.action = visualization_msgs::msg::Marker::ADD; - switch (id) - { - case 0: - rvizMarker_.color.r = 0.0f; - rvizMarker_.color.g = 0.0f; - rvizMarker_.color.b = 1.0f; - rvizMarker_.color.a = 1.0; - break; - case 1: - rvizMarker_.color.r = 1.0f; - rvizMarker_.color.g = 0.0f; - rvizMarker_.color.b = 0.0f; - rvizMarker_.color.a = 1.0; - break; - case 2: - rvizMarker_.color.r = 0.0f; - rvizMarker_.color.g = 1.0f; - rvizMarker_.color.b = 0.0f; - rvizMarker_.color.a = 1.0; - break; - case 3: - rvizMarker_.color.r = 0.0f; - rvizMarker_.color.g = 0.5f; - rvizMarker_.color.b = 0.5f; - rvizMarker_.color.a = 1.0; - break; - case 4: - rvizMarker_.color.r = 0.5f; - rvizMarker_.color.g = 0.5f; - rvizMarker_.color.b = 0.0; - rvizMarker_.color.a = 1.0; - break; - default: - rvizMarker_.color.r = 0.5f; - rvizMarker_.color.g = 0.0f; - rvizMarker_.color.b = 0.5f; - rvizMarker_.color.a = 1.0; - break; - } - rvizMarker_.lifetime = rclcpp::Duration (1.0); - rvizMarkerPub_->publish (rvizMarker_); - - //Create the pose marker messages - ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; - //Get the pose of the tag in the camera frame, then the output frame (usually torso) - tf2::doTransform(ar_pose_marker.pose, ar_pose_marker.pose,CamToOutput); - - - ar_pose_marker.header.frame_id = output_frame; - ar_pose_marker.header.stamp = 
image_msg->header.stamp; - ar_pose_marker.id = id; - arPoseMarkers_.markers.push_back (ar_pose_marker); - } - arMarkerPub_->publish (arPoseMarkers_); - } - catch (cv_bridge::Exception& e){ + //Create the rviz visualization messages + rvizMarker_.pose.position.x = markerPose.getOrigin().getX(); + rvizMarker_.pose.position.y = markerPose.getOrigin().getY(); + rvizMarker_.pose.position.z = markerPose.getOrigin().getZ(); + rvizMarker_.pose.orientation.x = markerPose.getRotation().getX(); + rvizMarker_.pose.orientation.y = markerPose.getRotation().getY(); + rvizMarker_.pose.orientation.z = markerPose.getRotation().getZ(); + rvizMarker_.pose.orientation.w = markerPose.getRotation().getW(); + rvizMarker_.header.frame_id = image_msg->header.frame_id; + rvizMarker_.header.stamp = image_msg->header.stamp; + rvizMarker_.id = id; + + rvizMarker_.scale.x = 1.0 * marker_size/100.0; + rvizMarker_.scale.y = 1.0 * marker_size/100.0; + rvizMarker_.scale.z = 0.2 * marker_size/100.0; + rvizMarker_.ns = "basic_shapes"; + rvizMarker_.type = visualization_msgs::msg::Marker::CUBE; + rvizMarker_.action = visualization_msgs::msg::Marker::ADD; + switch (id) + { + case 0: + rvizMarker_.color.r = 0.0f; + rvizMarker_.color.g = 0.0f; + rvizMarker_.color.b = 1.0f; + rvizMarker_.color.a = 1.0; + break; + case 1: + rvizMarker_.color.r = 1.0f; + rvizMarker_.color.g = 0.0f; + rvizMarker_.color.b = 0.0f; + rvizMarker_.color.a = 1.0; + break; + case 2: + rvizMarker_.color.r = 0.0f; + rvizMarker_.color.g = 1.0f; + rvizMarker_.color.b = 0.0f; + rvizMarker_.color.a = 1.0; + break; + case 3: + rvizMarker_.color.r = 0.0f; + rvizMarker_.color.g = 0.5f; + rvizMarker_.color.b = 0.5f; + rvizMarker_.color.a = 1.0; + break; + case 4: + rvizMarker_.color.r = 0.5f; + rvizMarker_.color.g = 0.5f; + rvizMarker_.color.b = 0.0; + rvizMarker_.color.a = 1.0; + break; + default: + rvizMarker_.color.r = 0.5f; + rvizMarker_.color.g = 0.0f; + rvizMarker_.color.b = 0.5f; + rvizMarker_.color.a = 1.0; + break; + } + rvizMarker_.lifetime = rclcpp::Duration (1.0); + rvizMarkerPub_->publish (rvizMarker_); + + //Create the pose marker messages + ar_track_alvar_msgs::msg::AlvarMarker ar_pose_marker; + //Get the pose of the tag in the camera frame, then the output frame (usually torso) + tf2::doTransform(ar_pose_marker.pose, ar_pose_marker.pose,CamToOutput); + ar_pose_marker.header.frame_id = output_frame; + ar_pose_marker.header.stamp = image_msg->header.stamp; + ar_pose_marker.id = id; + arPoseMarkers_.markers.push_back (ar_pose_marker); + } + arMarkerPub_->publish (arPoseMarkers_); + } + catch (cv_bridge::Exception& e) + { RCLCPP_ERROR(this->get_logger(),"Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ()); - } + } + prev_stamp_ = tf2_ros::fromMsg(image_msg->header.stamp); } } diff --git a/ar_track_alvar/src/Camera.cpp b/ar_track_alvar/src/Camera.cpp index e6426b6..c3d3814 100644 --- a/ar_track_alvar/src/Camera.cpp +++ b/ar_track_alvar/src/Camera.cpp @@ -141,44 +141,6 @@ Camera::Camera() getCamInfo_ = false; } -// Camera::Camera(ros::NodeHandle& n, std::string cam_info_topic) : n_(n) -// { -// calib_K = cv::Mat(3, 3, CV_64F, calib_K_data); -// calib_D = cv::Mat(4, 1, CV_64F, calib_D_data); -// memset(calib_K_data, 0, sizeof(double) * 3 * 3); -// memset(calib_D_data, 0, sizeof(double) * 4); -// calib_K_data[0][0] = 550; // Just some focal length by default -// calib_K_data[1][1] = 550; // Just some focal length by default -// calib_K_data[0][2] = 320; -// calib_K_data[1][2] = 240; -// calib_K_data[2][2] = 1; -// calib_x_res = 640; -// 
calib_y_res = 480; -// x_res = 640; -// y_res = 480; -// cameraInfoTopic_ = cam_info_topic; -// ROS_INFO("Subscribing to info topic"); -// sub_ = n_.subscribe(cameraInfoTopic_, 1, &Camera::camInfoCallback, this); -// getCamInfo_ = false; -// } - -// -// Camera::Camera(int w, int h) { -// calib_K = cvMat(3, 3, CV_64F, calib_K_data); -// calib_D = cvMat(4, 1, CV_64F, calib_D_data); -// memset(calib_K_data,0,sizeof(double)*3*3); -// memset(calib_D_data,0,sizeof(double)*4); -// calib_K_data[0][0] = w/2; -// calib_K_data[1][1] = w/2; -// calib_K_data[0][2] = w/2; -// calib_K_data[1][2] = h/2; -// calib_K_data[2][2] = 1; -// calib_x_res = w; -// calib_y_res = h; -// x_res = w; -// y_res = h; -//} - void Camera::SetSimpleCalib(int _x_res, int _y_res, double f_fac) { memset(calib_K_data, 0, sizeof(double) * 3 * 3); @@ -252,29 +214,27 @@ bool Camera::LoadCalibOpenCV(const char* calibfile) void Camera::SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info) { - //cam_info_ = cam_info; - - calib_x_res = cam_info_.width; - calib_y_res = cam_info_.height; + calib_x_res = cam_info->width; + calib_y_res = cam_info->height; x_res = calib_x_res; y_res = calib_y_res; - calib_K.at(0, 0) = cam_info_.k[0]; - calib_K.at(0, 1) = cam_info_.k[1]; - calib_K.at(0, 2) = cam_info_.k[2]; - calib_K.at(1, 0) = cam_info_.k[3]; - calib_K.at(1, 1) = cam_info_.k[4]; - calib_K.at(1, 2) = cam_info_.k[5]; - calib_K.at(2, 0) = cam_info_.k[6]; - calib_K.at(2, 1) = cam_info_.k[7]; - calib_K.at(2, 2) = cam_info_.k[8]; - - if (cam_info_.d.size() >= 4) + calib_K.at(0, 0) = cam_info->k[0]; + calib_K.at(0, 1) = cam_info->k[1]; + calib_K.at(0, 2) = cam_info->k[2]; + calib_K.at(1, 0) = cam_info->k[3]; + calib_K.at(1, 1) = cam_info->k[4]; + calib_K.at(1, 2) = cam_info->k[5]; + calib_K.at(2, 0) = cam_info->k[6]; + calib_K.at(2, 1) = cam_info->k[7]; + calib_K.at(2, 2) = cam_info->k[8]; + + if (cam_info->d.size() >= 4) { - calib_D.at(0, 0) = cam_info_.d[0]; - calib_D.at(1, 0) = cam_info_.d[1]; - calib_D.at(2, 0) = cam_info_.d[2]; - calib_D.at(3, 0) = cam_info_.d[3]; + calib_D.at(0, 0) = cam_info->d[0]; + calib_D.at(1, 0) = cam_info->d[1]; + calib_D.at(2, 0) = cam_info->d[2]; + calib_D.at(3, 0) = cam_info->d[3]; } else { @@ -694,7 +654,7 @@ void Camera::CalcExteriorOrientation(const vector& pw, } tra.setTo(cv::Scalar::all(0)); - rodriques.setTo(cv::Scalar::all(0)); + rodriques.setTo(cv::Scalar::all(0)); cv::solvePnP(pw, pi2, calib_K, cv::Mat(), rodriques, tra, false, cv::SOLVEPNP_ITERATIVE); } diff --git a/ar_track_alvar/src/Marker.cpp b/ar_track_alvar/src/Marker.cpp index 071b726..4d67ec6 100644 --- a/ar_track_alvar/src/Marker.cpp +++ b/ar_track_alvar/src/Marker.cpp @@ -425,12 +425,16 @@ void Marker::UpdatePose(vector& _marker_corners_img, Camera* cam, // Calculate exterior orientation if (orientation > 0) + { std::rotate(marker_corners_img.begin(), marker_corners_img.begin() + orientation, marker_corners_img.end()); + } if (update_pose) + { cam->CalcExteriorOrientation(marker_corners, marker_corners_img, &pose); + } } bool Marker::DecodeContent(int* orientation) { @@ -511,7 +515,9 @@ void Marker::SetMarkerSize(double _edge_length, int _res, double _margin) // Rest can be done only if we have existing resolution if (res <= 0) + { return; + } // marker_points marker_points.clear(); @@ -852,42 +858,25 @@ void MarkerData::DecodeOrientation(int* error, int* total, int* orientation) { (*total)++; if ((int)marker_content.at(j, i) != 0) - { errors[0]++; - } - if ((int)marker_content.at(i, j) != 0) - { errors[1]++; - } if 
((int)marker_content.at(j, i) != 255) - { errors[2]++; - } if ((int)marker_content.at(i, j) != 255) - { errors[3]++; - } } else if (j > (res / 2)) { (*total)++; if ((int)marker_content.at(j, i) != 255) - { errors[0]++; - } if ((int)marker_content.at(i, j) != 255) - { errors[1]++; - } if ((int)marker_content.at(j, i) != 0) - { errors[2]++; - } if ((int)marker_content.at(i, j) != 0) - { errors[3]++; - } } } *orientation = min_element(errors.begin(), errors.end()) - errors.begin(); @@ -1203,6 +1192,7 @@ bool MarkerData::DecodeContent(int* orientation) { // bool decode(vector& colors, int *orientation, double *error) { *orientation = 0; + BitsetExt bs; int erroneous = 0; int total = 0; @@ -1494,4 +1484,4 @@ void MarkerData::SetContent(MarkerContentType _content_type, unsigned long _id, } } -} // namespace alvar +} // namespace alvar \ No newline at end of file diff --git a/ar_track_alvar/src/MarkerDetector.cpp b/ar_track_alvar/src/MarkerDetector.cpp index 4174c8f..efb0e03 100644 --- a/ar_track_alvar/src/MarkerDetector.cpp +++ b/ar_track_alvar/src/MarkerDetector.cpp @@ -93,13 +93,11 @@ int MarkerDetectorImpl::Detect(cv::Mat& image, Camera* cam, bool track, LabelingMethod labeling_method, bool update_pose) { double error = -1; - + // Swap marker tables _swap_marker_tables(); _markers_clear(); - - switch (labeling_method) { case CVSEQ: @@ -122,18 +120,23 @@ int MarkerDetectorImpl::Detect(cv::Mat& image, Camera* cam, bool track, { for (size_t ii = 0; ii < _track_markers_size(); ii++) { - Marker* mn = _track_markers_at(ii); if (mn->GetError(Marker::DECODE_ERROR | Marker::MARGIN_ERROR) > 0) - continue; // We track only perfectly decoded markers + { + continue; // We track only perfectly decoded markers + } + int track_i = -1; int track_orientation = 0; double track_error = 1e200; for (unsigned i = 0; i < blob_corners.size() /*blobs_ret.size()*/; ++i) { if (blob_corners[i].empty()) + { continue; + } mn->CompareCorners(blob_corners[i], &orientation, &error); + if (error < track_error) { track_i = i; @@ -159,11 +162,9 @@ int MarkerDetectorImpl::Detect(cv::Mat& image, Camera* cam, bool track, } } - // Now we go through the rest of the blobs -- in case there are new markers... 
for (size_t i = 0; i < blob_corners.size(); ++i)
   {
-    if (blob_corners[i].empty())
     {
       continue;
     }
@@ -185,14 +186,15 @@ int MarkerDetectorImpl::Detect(cv::Mat& image, Camera* cam, bool track,
       _markers_push_back(mn);
 
       if (visualize)
+      {
         mn->Visualize(image, cam, CV_RGB(255, 0, 0));
+      }
     }
+
     delete mn;
   }
 
-  int val = (int)_markers_size();
-
-  return val;
+  return (int)_markers_size();
 }
 
 int MarkerDetectorImpl::DetectAdditional(cv::Mat& image, Camera* cam,
diff --git a/ar_track_alvar/test/test_ar_legacy.py b/ar_track_alvar/test/test_ar_legacy.py
index b94a256..08c47eb 100644
--- a/ar_track_alvar/test/test_ar_legacy.py
+++ b/ar_track_alvar/test/test_ar_legacy.py
@@ -4,6 +4,7 @@
 import sys
 import time
 import unittest
+import numpy as np
 
 from ament_index_python.packages import get_package_share_directory
 from launch import LaunchDescription
@@ -44,7 +45,7 @@ def generate_test_description():
 
         Node(
             package='ar_track_alvar',
-            executable='individualMarkers',#NoKinect',
+            executable='individualMarkersNoKinect',
             name='individual_markers',
             remappings=[
                 ("camera_image", cam_image_topic),
                 ("camera_info",cam_info_topic)
             ],
@@ -79,12 +80,13 @@ def test_markers_received(self):
        #     lambda msg: msgs_received.append(msg),
        #     1
        # )
+
+
+        transforms = []
         start_time = time.time()
         while rclpy.ok() and (time.time() - start_time) < 120:
             rclpy.spin_once(self.node, timeout_sec=0.1)
-            now = self.node.get_clock().now()
-
             dur = Duration()
             dur.sec = 40
             dur.nsec = 0
@@ -97,19 +99,29 @@ def test_markers_received(self):
             for i in range (0, len(tf_expected)):
                 try:
                     target_frame = 'ar_marker_{}'.format(i)
-                    (trans, rot) = self.tfBuffer.lookup_transform('camera', target_frame, now, dur)
-                    break
+                    time_stamp = rclpy.time.Time(seconds=0, nanoseconds=0).to_msg()
+                    obj = self.tfBuffer.lookup_transform('camera', target_frame, time=time_stamp)
+                    trans = obj.transform.translation
+                    rot = obj.transform.rotation
+
+                    item = [[trans.x, trans.y, trans.z], [rot.x, rot.y, rot.z, rot.w]]
+                    transforms.append(item)
+
                 except (LookupException, ConnectivityException, ExtrapolationException) as e:
-                    self.node.get_logger().error(str(e) + ' target_frame={}'.format(target_frame))
                     continue
+            if (len(transforms) >= 4):
+                break
+        print(transforms[0])
+        print(tf_expected[0])
+
+        # there are four markers in the test bag
+        for i in range(4):
+
+            trans = np.abs(np.asarray(transforms[i][0]) - np.asarray(tf_expected[i][0]))
+            assert np.max(trans) < 0.1
+            rot = np.abs(np.asarray(transforms[i][1]) - np.asarray(tf_expected[i][1]))
+            assert np.max(rot) < 0.1
+
+
 
-            # Compare each translation element (x, y, z)
-            for v_ret, v_expected in zip(trans, tf_expected[i][0]):
-                # Given that sigfig ignores the leading zero, we only compare the first sigfig. 
- numpy.testing.assert_approx_equal( - v_ret, v_expected, significant=1) - # Compare each orientation element (x, y, z, w) - for v_ret, v_expected in zip(rot, tf_expected[i][1]): - numpy.testing.assert_approx_equal( - v_ret, v_expected, significant=1) From 7b7a2cdf6a3715e8b22a9629200fac169bb4495b Mon Sep 17 00:00:00 2001 From: Patrick Musau Date: Wed, 4 Aug 2021 07:29:56 -0500 Subject: [PATCH 20/27] transforms and unit tests --- ar_track_alvar/CMakeLists.txt | 22 ++-------- ar_track_alvar/nodes/FindMarkerBundles.cpp | 42 +++++++++++++------ .../nodes/FindMarkerBundlesNoKinect.cpp | 28 +++++++++---- ar_track_alvar/nodes/IndividualMarkers.cpp | 35 +++++++++++----- .../nodes/IndividualMarkersNoKinect.cpp | 40 ++---------------- ar_track_alvar/nodes/TrainMarkerBundle.cpp | 28 +++++++++---- ar_track_alvar/src/Camera.cpp | 10 ----- ar_track_alvar/test/test_ar_legacy.py | 26 ++++-------- 8 files changed, 108 insertions(+), 123 deletions(-) diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt index 67990fe..98f7daf 100644 --- a/ar_track_alvar/CMakeLists.txt +++ b/ar_track_alvar/CMakeLists.txt @@ -5,18 +5,11 @@ find_package(ament_cmake REQUIRED) find_package(ament_cmake_auto REQUIRED) ament_auto_find_build_dependencies() -find_package(OpenCV REQUIRED COMPONENTS calib3d core highgui imgproc) -if(OpenCV_VERSION VERSION_GREATER 4.5.0) - target_compile_definitions(library PUBLIC USE_LEGACY_TRACKING=1) -endif() - +find_package(OpenCV REQUIRED) if(NOT DEFINED CMAKE_SUPPRESS_DEVELOPER_WARNINGS) set(CMAKE_SUPPRESS_DEVELOPER_WARNINGS 1 CACHE INTERNAL "No dev warnings") endif() -find_package(ament_cmake_auto REQUIRED) -ament_auto_find_build_dependencies() - find_package(rclcpp REQUIRED) find_package(tf2_ros REQUIRED) find_package(tf2 REQUIRED) @@ -34,13 +27,11 @@ find_package(geometry_msgs REQUIRED) find_package(rosbag2_bag_v2_plugins REQUIRED) find_package(visualization_msgs REQUIRED) find_package(PCL REQUIRED QUIET COMPONENTS common io) - find_package(Eigen3 REQUIRED) -find_package(tinyxml_vendor) +find_package(tinyxml_vendor REQUIRED) include_directories(include) - # Kinect filtering code set(KINECT_FILTERING_TARGETS kinect_filtering medianFilter) set(ALVAR_TARGETS ar_track_alvar individualMarkers individualMarkersNoKinect trainMarkerBundle findMarkerBundles findMarkerBundlesNoKinect createMarker) @@ -194,10 +185,7 @@ if(BUILD_TESTING) ) endif() -#TODO fix these tests -# if(BUILD_TESTING) -# add_subdirectory(test) -# endif() + # if (BUILD_TESTING) @@ -223,8 +211,4 @@ endif() # file(GLOB LAUNCH_FILES launch/*.launch test/*.test) # endif() -include(CheckCXXCompilerFlag) -CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) -CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) - ament_package() \ No newline at end of file diff --git a/ar_track_alvar/nodes/FindMarkerBundles.cpp b/ar_track_alvar/nodes/FindMarkerBundles.cpp index ef56cec..f0c99a2 100644 --- a/ar_track_alvar/nodes/FindMarkerBundles.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundles.cpp @@ -103,9 +103,11 @@ class FindMarkerBundles : public rclcpp::Node rclcpp::Subscription::SharedPtr info_sub_; - tf2_ros::TransformBroadcaster tf_broadcaster_; - tf2_ros::TransformListener tf_listener_; - tf2_ros::Buffer tf2_; + tf2::TimePoint prev_stamp_; + tf2::TimePoint prev_stamp2_; + std::shared_ptr tf2_; + std::shared_ptr tf_listener_; + std::shared_ptr tf_broadcaster_; MarkerDetector marker_detector; MultiMarkerBundle **multi_marker_bundles=NULL; @@ -130,8 +132,17 @@ class FindMarkerBundles : public rclcpp::Node public: 
- FindMarkerBundles(int argc, char* argv[]):Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this) + FindMarkerBundles(int argc, char* argv[]):Node("marker_detect") { + + rclcpp::Clock::SharedPtr clock = this->get_clock(); + tf2_ = std::make_shared(clock); + tf_listener_ = std::make_shared(*tf2_); + tf_broadcaster_ = std::make_shared(*this); + prev_stamp_ = tf2::get_now(); + prev_stamp2_ = tf2::get_now(); + + if(argc < 9) { std::cout << std::endl; @@ -217,7 +228,7 @@ class FindMarkerBundles : public rclcpp::Node { cam->SetCameraInfo(cam_info); cam->getCamInfo_ = true; - //sub_.reset(); + info_sub_.reset(); } } @@ -370,17 +381,22 @@ class FindMarkerBundles : public rclcpp::Node p.point.z = corner_coord.z()/100.0; try{ + geometry_msgs::msg::TransformStamped pt_transform; + tf2::TimePoint tf2_time = tf2_ros::fromMsg(rclcpp::Time(cloud.header.stamp)); + pt_transform = tf2_->lookupTransform(cloud.header.frame_id, marker_frame,tf2_time,tf2_time - prev_stamp2_); - tf2_.canTransform(cloud.header.frame_id, marker_frame, rclcpp::Time(0), rclcpp::Duration(0.1)); - geometry_msgs::msg::TransformStamped pt_transform; - pt_transform = tf2_.lookupTransform(cloud.header.frame_id, marker_frame, rclcpp::Time(0), rclcpp::Duration(0.1)); + // tf2_.canTransform(cloud.header.frame_id, marker_frame, rclcpp::Time(0), rclcpp::Duration(0.1)); + // geometry_msgs::msg::TransformStamped pt_transform; + // pt_transform = tf2_.lookupTransform(cloud.header.frame_id, marker_frame, rclcpp::Time(0), rclcpp::Duration(0.1)); tf2::doTransform(p, output_p,pt_transform); } catch (tf2::TransformException ex){ RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "ERROR InferCorners: %s",ex.what()); return -1; } + prev_stamp2_ = tf2_ros::fromMsg(rclcpp::Time(cloud.header.stamp)); + bund_corners[j].x += output_p.point.x; bund_corners[j].y += output_p.point.y; @@ -637,7 +653,7 @@ class FindMarkerBundles : public rclcpp::Node camToMarker.transform.translation = trans; camToMarker.transform.rotation = rot; - tf_broadcaster_.sendTransform(camToMarker); + tf_broadcaster_->sendTransform(camToMarker); //Create the rviz visualization message rvizMarker->pose.position.x = markerPose.getOrigin().getX(); @@ -716,11 +732,12 @@ class FindMarkerBundles : public rclcpp::Node if(cam->getCamInfo_){ try{ //Get the transformation from the Camera to the output frame for this image capture - geometry_msgs::msg::TransformStamped CamToOutput; + std::string tf_error; + geometry_msgs::msg::TransformStamped CamToOutput; try { - tf2_.canTransform(output_frame, msg->header.frame_id, msg->header.stamp, rclcpp::Duration(1.0)); - CamToOutput = tf2_.lookupTransform(output_frame, msg->header.frame_id, msg->header.stamp, rclcpp::Duration(1.0)); + tf2::TimePoint tf2_time = tf2_ros::fromMsg(image_msg.header.stamp); + CamToOutput = tf2_->lookupTransform(output_frame, image_msg.header.frame_id,tf2_time,tf2_time - prev_stamp_); } catch (tf2::TransformException ex){ RCLCPP_ERROR(this->get_logger(), "%s",ex.what()); @@ -794,6 +811,7 @@ class FindMarkerBundles : public rclcpp::Node { RCLCPP_ERROR (this->get_logger(),"ar_track_alvar: Image error: %s", image_msg.encoding.c_str ()); } + prev_stamp_ = tf2_ros::fromMsg(image_msg.header.stamp); } } diff --git a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp index cb1e4fb..9a8e1f8 100644 --- a/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp +++ b/ar_track_alvar/nodes/FindMarkerBundlesNoKinect.cpp @@ -68,9 +68,10 @@ class 
FindMarkerBundlesNoKinect : public rclcpp::Node image_transport::Subscriber cam_sub_; ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; - tf2_ros::TransformBroadcaster tf_broadcaster_; - tf2_ros::TransformListener tf_listener_; - tf2_ros::Buffer tf2_; + tf2::TimePoint prev_stamp_; + std::shared_ptr tf2_; + std::shared_ptr tf_listener_; + std::shared_ptr tf_broadcaster_; rclcpp::Publisher::SharedPtr arMarkerPub_; rclcpp::Publisher::SharedPtr rvizMarkerPub_; @@ -95,8 +96,15 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node public: - FindMarkerBundlesNoKinect(int argc, char* argv[]):Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this) - { + FindMarkerBundlesNoKinect(int argc, char* argv[]):Node("marker_detect") + { + + rclcpp::Clock::SharedPtr clock = this->get_clock(); + tf2_ = std::make_shared(clock); + tf_listener_ = std::make_shared(*tf2_); + tf_broadcaster_ = std::make_shared(*this); + prev_stamp_ = tf2::get_now(); + if(argc < 8) { std::cout << std::endl; @@ -189,7 +197,7 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node { cam->SetCameraInfo(cam_info); cam->getCamInfo_ = true; - //sub_.reset(); + info_sub_.reset(); } } @@ -239,7 +247,7 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node camToMarker.transform.translation = trans; camToMarker.transform.rotation = rot; - tf_broadcaster_.sendTransform(camToMarker); + tf_broadcaster_->sendTransform(camToMarker); } //Create the rviz visualization message @@ -316,9 +324,10 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node { //Get the transformation from the Camera to the output frame for this image capture geometry_msgs::msg::TransformStamped CamToOutput; + std::string tf_error; try{ - tf2_.canTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); - CamToOutput = tf2_.lookupTransform(output_frame, image_msg->header.frame_id, image_msg->header.stamp, rclcpp::Duration(1.0)); + tf2::TimePoint tf2_time = tf2_ros::fromMsg(image_msg->header.stamp); + CamToOutput = tf2_->lookupTransform(output_frame, image_msg->header.frame_id,tf2_time,tf2_time - prev_stamp_); } catch (tf2::TransformException ex){ RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); @@ -400,6 +409,7 @@ class FindMarkerBundlesNoKinect : public rclcpp::Node { RCLCPP_ERROR (this->get_logger(),"Could not convert from '%s' to 'rgb8'.", image_msg->encoding.c_str ()); } + prev_stamp_ = tf2_ros::fromMsg(image_msg->header.stamp); } } diff --git a/ar_track_alvar/nodes/IndividualMarkers.cpp b/ar_track_alvar/nodes/IndividualMarkers.cpp index 1c6f2a2..abc3752 100644 --- a/ar_track_alvar/nodes/IndividualMarkers.cpp +++ b/ar_track_alvar/nodes/IndividualMarkers.cpp @@ -76,13 +76,16 @@ class IndividualMarkers : public rclcpp::Node rclcpp::Publisher::SharedPtr rvizMarkerPub2_; rclcpp::Subscription::SharedPtr info_sub_; - tf2_ros::TransformBroadcaster tf_broadcaster_; - tf2_ros::TransformListener tf_listener_; - tf2_ros::Buffer tf2_; + + tf2::TimePoint prev_stamp_; + std::shared_ptr tf2_; + std::shared_ptr tf_listener_; + std::shared_ptr tf_broadcaster_; ar_track_alvar_msgs::msg::AlvarMarkers arPoseMarkers_; visualization_msgs::msg::Marker rvizMarker_; + MarkerDetector marker_detector; bool enableSwitched = false; bool enabled = true; @@ -101,8 +104,15 @@ class IndividualMarkers : public rclcpp::Node public: - IndividualMarkers(int argc, char* argv[]):Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this) + IndividualMarkers(int argc, char* 
argv[]):Node("marker_detect") { + + rclcpp::Clock::SharedPtr clock = this->get_clock(); + tf2_ = std::make_shared(clock); + tf_listener_ = std::make_shared(*tf2_); + tf_broadcaster_ = std::make_shared(*this); + prev_stamp_ = tf2::get_now(); + if(argc > 1) { RCLCPP_WARN(this->get_logger(), "Command line arguments are deprecated. Consider using ROS parameters and remappings."); @@ -188,7 +198,7 @@ class IndividualMarkers : public rclcpp::Node RCLCPP_INFO(this->get_logger(), "Subscribing to image topic"); cloud_sub_ = this->create_subscription(cam_image_topic, 1, - std::bind(&IndividualMarkers::getPointCloudCallback, this, std::placeholders::_1)); + std::bind(&IndividualMarkers::getPointCloudCallback, this, std::placeholders::_1)); } RCLCPP_INFO(this->get_logger(),"Subscribing to info topic"); @@ -201,7 +211,7 @@ class IndividualMarkers : public rclcpp::Node { cam->SetCameraInfo(cam_info); cam->getCamInfo_ = true; - //sub_.reset(); + info_sub_.reset(); } } @@ -489,11 +499,13 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra //Use the kinect to improve the pose Pose ret_pose; GetMarkerPoses(&ipl_image, cloud); - + std::string tf_error; geometry_msgs::msg::TransformStamped CamToOutput; - try { - tf2_.canTransform(output_frame, image_msg.header.frame_id, image_msg.header.stamp,rclcpp::Duration(1.0)); - CamToOutput = tf2_.lookupTransform(output_frame, image_msg.header.frame_id, image_msg.header.stamp, rclcpp::Duration(1.0)); + try + { + tf2::TimePoint tf2_time = tf2_ros::fromMsg(image_msg.header.stamp); + CamToOutput = tf2_->lookupTransform(output_frame, image_msg.header.frame_id,tf2_time,tf2_time - prev_stamp_); + } catch (tf2::TransformException ex) { RCLCPP_ERROR(this->get_logger(), "%s",ex.what()); } @@ -546,7 +558,7 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra camToMarker.transform.translation = trans; camToMarker.transform.rotation = rot; - tf_broadcaster_.sendTransform(camToMarker); + tf_broadcaster_->sendTransform(camToMarker); //Create the rviz visualization messages @@ -631,6 +643,7 @@ void draw3dPoints(ARCloud::Ptr cloud, string frame, int color, int id, double ra { RCLCPP_ERROR(this->get_logger(),"Could not convert from '%s' to 'rgb8'.", image_msg.encoding.c_str ()); } + prev_stamp_ = tf2_ros::fromMsg(image_msg.header.stamp); } } }; diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp index 555a8c4..2903df1 100644 --- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp +++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp @@ -71,15 +71,16 @@ class IndividualMarkersNoKinect : public rclcpp::Node rclcpp::Subscription::SharedPtr enable_sub_; rclcpp::Subscription::SharedPtr cam_sub_; - tf2::TimePoint prev_stamp_; + // image_transport::Subscriber cam_sub_; + rclcpp::Publisher::SharedPtr arMarkerPub_; rclcpp::Publisher::SharedPtr rvizMarkerPub_; rclcpp::Subscription::SharedPtr info_sub_; - + tf2::TimePoint prev_stamp_; std::shared_ptr tf2_; std::shared_ptr tf_listener_; std::shared_ptr tf_broadcaster_; @@ -95,7 +96,6 @@ class IndividualMarkersNoKinect : public rclcpp::Node double max_track_error; std::string cam_image_topic; std::string cam_info_topic; - //image_transport::ImageTransport it_ std::string output_frame; int marker_resolution = 5; // default marker resolution int marker_margin = 2; // default marker margin @@ -219,7 +219,7 @@ class IndividualMarkersNoKinect : public rclcpp::Node { cam->SetCameraInfo(cam_info); cam->getCamInfo_ = true; - // 
info_sub_.reset(); + info_sub_.reset(); } } @@ -405,36 +405,4 @@ int main(int argc, char *argv[]) rclcpp::shutdown(); return 0; - // TODO Figure out how to do this - // image_transport::ImageTransport it_(n); - - // // Run at the configured rate, discarding pointcloud msgs if necessary - // rclcpp::Rate rate(max_frequency); - - - // enableSwitched = true; - // while (rclcpp::ok()) - // { - // rclcpp::spin_some(node); - // rate.sleep(); - - // if (std::abs((rate.expectedCycleTime() - rclcpp::Duration(1.0 / max_frequency)).toSec()) > 0.001) - // { - // // Change rate dynamically; if must be above 0, as 0 will provoke a segfault on next spinOnce - // RCLCPP_DEBUG(rclcpp::get_logger("ArTrackAlvar"), "Changing frequency from %.2f to %.2f", 1.0 / rate.expectedCycleTime().toSec(), max_frequency); - // rate = rclcpp::Rate(max_frequency); - // } - - // if (enableSwitched) - // { - // // Enable/disable switch: subscribe/unsubscribe to make use of pointcloud processing nodelet - // // lazy publishing policy; in CPU-scarce computer as TurtleBot's laptop this is a huge saving - // if (enabled) - // cam_sub_ = it_.subscribe(cam_image_topic, 1, &getCapCallback); - // else - // cam_sub_.shutdown(); - // enableSwitched = false; - // } - // } - } diff --git a/ar_track_alvar/nodes/TrainMarkerBundle.cpp b/ar_track_alvar/nodes/TrainMarkerBundle.cpp index 3e1c3fb..2008e40 100644 --- a/ar_track_alvar/nodes/TrainMarkerBundle.cpp +++ b/ar_track_alvar/nodes/TrainMarkerBundle.cpp @@ -68,9 +68,14 @@ class TrainMarkerBundle : public rclcpp::Node image_transport::Subscriber cam_sub_; rclcpp::Publisher::SharedPtr arMarkerPub_; rclcpp::Publisher::SharedPtr rvizMarkerPub_; - tf2_ros::TransformBroadcaster tf_broadcaster_; - tf2_ros::TransformListener tf_listener_; - tf2_ros::Buffer tf2_; + + + tf2::TimePoint prev_stamp_; + std::shared_ptr tf2_; + std::shared_ptr tf_listener_; + std::shared_ptr tf_broadcaster_; + + MarkerDetector marker_detector; MultiMarkerInitializer *multi_marker_init=NULL; MultiMarkerBundle *multi_marker_bundle=NULL; @@ -97,9 +102,15 @@ class TrainMarkerBundle : public rclcpp::Node public: - TrainMarkerBundle(int argc, char* argv[]):Node("marker_detect"), tf2_(this->get_clock()), tf_listener_(tf2_), tf_broadcaster_(this) + TrainMarkerBundle(int argc, char* argv[]):Node("marker_detect") { + rclcpp::Clock::SharedPtr clock = this->get_clock(); + tf2_ = std::make_shared(clock); + tf_listener_ = std::make_shared(*tf2_); + tf_broadcaster_ = std::make_shared(*this); + prev_stamp_ = tf2::get_now(); + if(argc < 8){ std::cout << std::endl; @@ -167,10 +178,10 @@ class TrainMarkerBundle : public rclcpp::Node if(cam->getCamInfo_){ try{ //Get the transformation from the Camera to the output frame for this image capture - geometry_msgs::msg::TransformStamped CamToOutput; + geometry_msgs::msg::TransformStamped CamToOutput; try{ - tf2_.canTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp, rclcpp::Duration(1.0)); - CamToOutput= tf2_.lookupTransform(image_msg->header.frame_id, output_frame, image_msg->header.stamp,rclcpp::Duration(1.0)); + tf2::TimePoint tf2_time = tf2_ros::fromMsg(image_msg->header.stamp); + CamToOutput = tf2_->lookupTransform(output_frame, image_msg->header.frame_id,tf2_time,tf2_time - prev_stamp_); } catch (tf2::TransformException ex){ RCLCPP_ERROR(rclcpp::get_logger("ArTrackAlvar"), "%s",ex.what()); @@ -218,6 +229,7 @@ class TrainMarkerBundle : public rclcpp::Node catch (cv_bridge::Exception& e){ RCLCPP_ERROR (this->get_logger(),"Could not convert from '%s' to 'rgb8'.", 
image_msg->encoding.c_str ());
     }
+    prev_stamp_ = tf2_ros::fromMsg(image_msg->header.stamp);
   }
 
   //Sleep if we are auto collecting
@@ -349,7 +361,7 @@ class TrainMarkerBundle : public rclcpp::Node
             camToMarker.transform.translation = trans;
             camToMarker.transform.rotation = rot;
 
-            tf_broadcaster_.sendTransform(camToMarker);
+            tf_broadcaster_->sendTransform(camToMarker);
         }
 
         //Create the rviz visualization message
diff --git a/ar_track_alvar/src/Camera.cpp b/ar_track_alvar/src/Camera.cpp
index c3d3814..0fcc26f 100644
--- a/ar_track_alvar/src/Camera.cpp
+++ b/ar_track_alvar/src/Camera.cpp
@@ -245,16 +245,6 @@ void Camera::SetCameraInfo(const sensor_msgs::msg::CameraInfo::SharedPtr cam_info)
   }
 }
 
-// void Camera::camInfoCallback(const sensor_msgs::CameraInfoConstPtr& cam_info)
-// {
-//   if (!getCamInfo_)
-//   {
-//     SetCameraInfo(*cam_info);
-//     getCamInfo_ = true;
-//     sub_.shutdown();
-//   }
-// }
-
 bool Camera::SetCalib(const char* calibfile, int _x_res, int _y_res, FILE_FORMAT format)
 {
 
diff --git a/ar_track_alvar/test/test_ar_legacy.py b/ar_track_alvar/test/test_ar_legacy.py
index 08c47eb..1f4a28d 100644
--- a/ar_track_alvar/test/test_ar_legacy.py
+++ b/ar_track_alvar/test/test_ar_legacy.py
@@ -36,13 +36,13 @@ def generate_test_description():
     return LaunchDescription([
-        # Bag Node
-
+        # Launch bag with four markers
         launch.actions.ExecuteProcess(
             cmd=['ros2', 'bag', 'play', '--loop','-s',"rosbag_v2",bag_name],
             output='screen'
         ),
-
+        
+        # Individual Marker Detector
         Node(
             package='ar_track_alvar',
             executable='individualMarkersNoKinect',
@@ -71,17 +71,7 @@ def tearDownClass(cls):
         rclpy.shutdown()
 
-    def test_markers_received(self):
-        # # Expect the disparity node to publish on '/disparity' topic
-        # msgs_received = []
-        # self.node.create_subscription(
-        #     AlvarMarkers,
-        #     "ar_pose_marker",
-        #     lambda msg: msgs_received.append(msg),
-        #     1
-        # )
-
+    def test_marker_pose_estimation(self):
         transforms =[]
         start_time = time.time()
         while rclpy.ok() and (time.time() - start_time) < 120:
@@ -109,14 +99,14 @@ def test_markers_received(self):
             except (LookupException, ConnectivityException, ExtrapolationException) as e:
                 continue
+            
+            # Break when we've received all four markers
             if(len(transforms)>=4):
                 break
 
-        print(transforms[0])
-        print(tf_expected[0])
-        # there are four markers in the test bag
+        # There are four markers in the test bag.
+        # Make sure the deviation in rotation and translation is less than 0.1
         for i in range(4):
-
             trans = np.abs(np.asarray(transforms[i][0]) - np.asarray(tf_expected[i][0]))
             assert np.max(trans) < 0.1
             rot = np.abs( np.asarray(transforms[i][1]) - np.asarray(tf_expected[i][1]))
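
The recurring change in this patch is the lookupTransform call: each node now looks up the camera-to-output transform at the image stamp, with a timeout equal to the time elapsed since the previous frame (tf2_time - prev_stamp_). The same pattern has a direct analogue in the tf2_ros Python API; a minimal sketch follows — the real nodes are C++, and the node and frame names here are illustrative only.

import rclpy
from rclpy.node import Node
from tf2_ros import TransformException
from tf2_ros.buffer import Buffer
from tf2_ros.transform_listener import TransformListener

class LookupDemo(Node):
    def __init__(self):
        super().__init__('lookup_demo')
        # Buffer and listener are created after the node (and its clock) exist,
        # mirroring the shared_ptr members introduced in this patch.
        self.tf_buffer = Buffer()
        self.tf_listener = TransformListener(self.tf_buffer, self)
        self.prev_stamp = self.get_clock().now()

    def on_frame(self, stamp):
        # Wait at most as long as the gap since the previous frame,
        # mirroring lookupTransform(..., tf2_time, tf2_time - prev_stamp_).
        timeout = stamp - self.prev_stamp
        try:
            t = self.tf_buffer.lookup_transform(
                'torso_lift_link', 'camera', stamp, timeout)
        except TransformException as ex:
            self.get_logger().error('%s' % ex)
            return None
        self.prev_stamp = stamp
        return t

Using the inter-frame gap as the lookup timeout bounds how long a stale TF tree can stall the pipeline to roughly one frame period, instead of the fixed one-second wait the ROS 1 code used.
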
From 59a2392d4daeed25c83f91534d0f510aec749f54 Mon Sep 17 00:00:00 2001
From: Patrick Musau
Date: Wed, 4 Aug 2021 11:04:34 -0500
Subject: [PATCH 21/27] port launch files

---
 ar_track_alvar/launch/pr2_bundle.launch       | 14 ---------
 ar_track_alvar/launch/pr2_bundle.py           | 30 +++++++++++++++++++
 .../launch/pr2_bundle_no_kinect.launch        | 13 --------
 ar_track_alvar/launch/pr2_bundle_no_kinect.py | 27 +++++++++++++++++
 ar_track_alvar/launch/pr2_indiv.launch        | 20 -------------
 ar_track_alvar/launch/pr2_indiv.py            | 30 +++++++++++++++++++
 .../launch/pr2_indiv_no_kinect.launch         | 18 -----------
 ar_track_alvar/launch/pr2_indiv_no_kinect.py  | 30 +++++++++++++++++++
 ar_track_alvar/launch/pr2_train.launch        | 11 -------
 ar_track_alvar/launch/pr2_train.py            | 27 +++++++++++++++++
 10 files changed, 144 insertions(+), 76 deletions(-)
 delete mode 100755 ar_track_alvar/launch/pr2_bundle.launch
 create mode 100644 ar_track_alvar/launch/pr2_bundle.py
 delete mode 100755 ar_track_alvar/launch/pr2_bundle_no_kinect.launch
 create mode 100644 ar_track_alvar/launch/pr2_bundle_no_kinect.py
 delete mode 100755 ar_track_alvar/launch/pr2_indiv.launch
 create mode 100644 ar_track_alvar/launch/pr2_indiv.py
 delete mode 100755 ar_track_alvar/launch/pr2_indiv_no_kinect.launch
 create mode 100644 ar_track_alvar/launch/pr2_indiv_no_kinect.py
 delete mode 100755 ar_track_alvar/launch/pr2_train.launch
 create mode 100644 ar_track_alvar/launch/pr2_train.py

diff --git a/ar_track_alvar/launch/pr2_bundle.launch b/ar_track_alvar/launch/pr2_bundle.launch
deleted file mode 100755
index 69d49da..0000000
--- a/ar_track_alvar/launch/pr2_bundle.launch
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/ar_track_alvar/launch/pr2_bundle.py b/ar_track_alvar/launch/pr2_bundle.py
new file mode 100644
index 0000000..190f352
--- /dev/null
+++ b/ar_track_alvar/launch/pr2_bundle.py
@@ -0,0 +1,30 @@
+
+
+from launch import LaunchDescription
+from launch_ros.actions import Node
+from ament_index_python.packages import get_package_share_directory
+
+# command line arguments
+marker_size=4.4
+max_new_marker_error=0.08
+max_track_error=0.0
+cam_image_topic="/kinect_head/depth_registered/points"
+cam_info_topic="/kinect_head/rgb/camera_info"
+output_frame="/torso_lift_link"
+med_filt_size=10
+truth_table_leg = os.path.join(get_package_share_directory('ar_track_alvar'),'bundles','truthTableLeg.xml')
+table_bundle = os.path.join(get_package_share_directory('ar_track_alvar'),'bundles','table_8_9_10.xml')
+
+def generate_launch_description():
+    return LaunchDescription([
+        Node(
+            package='ar_track_alvar',
+            node_executable='findMarkerBundles',
+            output='screen',
+            remappings=[
+                ("camera_image", cam_image_topic),
+                ("camera_info",cam_info_topic)
+            ],
+            arguments=[marker_size, max_new_marker_error, max_track_error, cam_image_topic, cam_info_topic, output_frame, med_filt_size, truth_table_leg, table_bundle],
+        ),
+    ])
\ No newline at end of file
diff --git a/ar_track_alvar/launch/pr2_bundle_no_kinect.launch b/ar_track_alvar/launch/pr2_bundle_no_kinect.launch
deleted file mode 100755
index c7892dd..0000000
--- a/ar_track_alvar/launch/pr2_bundle_no_kinect.launch
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/ar_track_alvar/launch/pr2_bundle_no_kinect.py
b/ar_track_alvar/launch/pr2_bundle_no_kinect.py
new file mode 100644
index 0000000..969d5c3
--- /dev/null
+++ b/ar_track_alvar/launch/pr2_bundle_no_kinect.py
@@ -0,0 +1,27 @@
+from launch import LaunchDescription
+from launch_ros.actions import Node
+from ament_index_python.packages import get_package_share_directory
+
+# command line arguments
+marker_size=4.4
+max_new_marker_error=0.08
+max_track_error=0.2
+cam_image_topic="/wide_stereo/left/image_color"
+cam_info_topic="/wide_stereo/left/camera_info"
+output_frame="/torso_lift_link"
+truth_table_leg = os.path.join(get_package_share_directory('ar_track_alvar'),'bundles','truthTableLeg.xml')
+table_bundle = os.path.join(get_package_share_directory('ar_track_alvar'),'bundles','table_8_9_10.xml')
+
+def generate_launch_description():
+    return LaunchDescription([
+        Node(
+            package='ar_track_alvar',
+            node_executable='findMarkerBundlesNoKinect',
+            output='screen',
+            remappings=[
+                ("camera_image", cam_image_topic),
+                ("camera_info",cam_info_topic)
+            ],
+            arguments=[marker_size, max_new_marker_error, max_track_error, cam_image_topic, cam_info_topic, output_frame, truth_table_leg, table_bundle],
+        ),
+    ])
\ No newline at end of file
diff --git a/ar_track_alvar/launch/pr2_indiv.launch b/ar_track_alvar/launch/pr2_indiv.launch
deleted file mode 100755
index 27e4689..0000000
--- a/ar_track_alvar/launch/pr2_indiv.launch
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/ar_track_alvar/launch/pr2_indiv.py b/ar_track_alvar/launch/pr2_indiv.py
new file mode 100644
index 0000000..4c2c288
--- /dev/null
+++ b/ar_track_alvar/launch/pr2_indiv.py
@@ -0,0 +1,30 @@
+
+
+from launch import LaunchDescription
+import launch_ros.actions
+
+# command line arguments
+marker_size=4.4
+max_new_marker_error=0.08
+max_track_error=0.2
+cam_image_topic="/wide_stereo/left/image_color"
+cam_info_topic="/wide_stereo/left/camera_info"
+output_frame="/torso_lift_link"
+
+def generate_launch_description():
+    return LaunchDescription([
+        launch_ros.actions.Node(
+            package='ar_track_alvar',
+            node_executable='individualMarkers',
+            output='screen',
+            remappings=[
+                ("camera_image", cam_image_topic),
+                ("camera_info",cam_info_topic)
+            ],
+            parameters=[
+                {"marker_size":marker_size},
+                {"max_new_marker_error":max_new_marker_error},
+                {"max_track_error":max_track_error},
+                {"output_frame":output_frame}
+            ],)
+    ])
\ No newline at end of file
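
The individual-marker launch files pass configuration through parameters=[...] rather than the argv list the bundle variants still use. For that to work, the node on the other end must declare each parameter name, or the settings are rejected at startup. A minimal rclpy sketch of the consuming side — illustrative only, since the shipped nodes are C++:

import rclpy
from rclpy.node import Node

class MarkerParams(Node):
    def __init__(self):
        super().__init__('marker_detect')
        # Every name passed via parameters=[...] in the launch file must
        # be declared here (defaults below mirror the launch files).
        self.declare_parameter('marker_size', 4.4)
        self.declare_parameter('max_new_marker_error', 0.08)
        self.declare_parameter('max_track_error', 0.2)
        self.declare_parameter('output_frame', 'torso_lift_link')
        size = self.get_parameter('marker_size').value
        self.get_logger().info('marker_size = %.2f' % size)

def main():
    rclpy.init()
    rclpy.spin(MarkerParams())

if __name__ == '__main__':
    main()
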
diff --git a/ar_track_alvar/launch/pr2_indiv_no_kinect.launch b/ar_track_alvar/launch/pr2_indiv_no_kinect.launch
deleted file mode 100755
index 4428714..0000000
--- a/ar_track_alvar/launch/pr2_indiv_no_kinect.launch
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/ar_track_alvar/launch/pr2_indiv_no_kinect.py b/ar_track_alvar/launch/pr2_indiv_no_kinect.py
new file mode 100644
index 0000000..2e43bc5
--- /dev/null
+++ b/ar_track_alvar/launch/pr2_indiv_no_kinect.py
@@ -0,0 +1,30 @@
+
+
+from launch import LaunchDescription
+import launch_ros.actions
+
+# command line arguments
+marker_size=4.4
+max_new_marker_error=0.08
+max_track_error=0.2
+cam_image_topic="/wide_stereo/left/image_color"
+cam_info_topic="/wide_stereo/left/camera_info"
+output_frame="/torso_lift_link"
+
+def generate_launch_description():
+    return LaunchDescription([
+        launch_ros.actions.Node(
+            package='ar_track_alvar',
+            node_executable='individualMarkersNoKinect',
+            output='screen',
+            remappings=[
+                ("camera_image", cam_image_topic),
+                ("camera_info",cam_info_topic)
+            ],
+            parameters=[
+                {"marker_size":marker_size},
+                {"max_new_marker_error":arg max_new_marker_error},
+                {"max_track_error":max_track_error},
+                {"output_frame":output_frame}
+            ],
+    ])
\ No newline at end of file
diff --git a/ar_track_alvar/launch/pr2_train.launch b/ar_track_alvar/launch/pr2_train.launch
deleted file mode 100755
index b929260..0000000
--- a/ar_track_alvar/launch/pr2_train.launch
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
diff --git a/ar_track_alvar/launch/pr2_train.py b/ar_track_alvar/launch/pr2_train.py
new file mode 100644
index 0000000..4a22e72
--- /dev/null
+++ b/ar_track_alvar/launch/pr2_train.py
@@ -0,0 +1,27 @@
+
+
+from launch import LaunchDescription
+from launch_ros.actions import Node
+
+# command line arguments
+nof_markers=8
+marker_size=4.4
+max_new_marker_error=0.08
+max_track_error=0.2
+cam_image_topic="/wide_stereo/left/image_color"
+cam_info_topic="/wide_stereo/left/camera_info"
+output_frame="/torso_lift_link"
+
+def generate_launch_description():
+    return LaunchDescription([
+        Node(
+            package='ar_track_alvar',
+            node_executable='trainMarkerBundle',
+            output='screen',
+            remappings=[
+                ("camera_image", cam_image_topic),
+                ("camera_info",cam_info_topic)
+            ],
+            arguments=[nof_markers, marker_size ,arg max_new_marker_error, max_track_error ,cam_image_topic, cam_info_topic, output_frame]
+        ),
+    ])
\ No newline at end of file
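
Patch 22 below drops the ament_download step and vendors the test bag under test/resources instead, so a test can resolve the bag from the installed package rather than downloading it. A hedged sketch of that lookup, assuming the resources folder is installed under the package share directory (the exact install path is an assumption):

import os
from ament_index_python.packages import get_package_share_directory

# Assumed install layout: the bag ships with the package share files.
bag_name = os.path.join(
    get_package_share_directory('ar_track_alvar'),
    'resources', 'alvar-marker-pose-test.bag')
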
From 4423d0921c9d5a274fd3dd4151f04ff558f6cdd9 Mon Sep 17 00:00:00 2001
From: Patrick Musau
Date: Wed, 4 Aug 2021 15:06:50 -0500
Subject: [PATCH 22/27] unit tests and launch files

---
 ar_track_alvar/CMakeLists.txt                 | 39 ++------
 ar_track_alvar/launch/pr2_bundle.py           |  4 +-
 ar_track_alvar/launch/pr2_bundle_no_kinect.py |  4 +-
 ar_track_alvar/launch/pr2_indiv.py            |  3 +-
 ar_track_alvar/launch/pr2_indiv_no_kinect.py  | 13 +--
 ar_track_alvar/launch/pr2_train.py            |  5 +-
 .../nodes/IndividualMarkersNoKinect.cpp       |  2 +-
 .../test/resources/alvar-marker-pose-test.bag | Bin 0 -> 26774661 bytes
 ar_track_alvar/test/test_ar_legacy.py         |  2 +-
 .../test/test_ar_track_alvar_bag.py           |  4 +-
 .../test_ar_track_alvar_individual_markers.py |  2 +-
 ar_track_alvar/test/test_launch_files.py      | 78 ++++++++++++++++++
 12 files changed, 107 insertions(+), 49 deletions(-)
 create mode 100644 ar_track_alvar/test/resources/alvar-marker-pose-test.bag
 create mode 100644 ar_track_alvar/test/test_launch_files.py

diff --git a/ar_track_alvar/CMakeLists.txt b/ar_track_alvar/CMakeLists.txt
index 98f7daf..4b6ea8a 100644
--- a/ar_track_alvar/CMakeLists.txt
+++ b/ar_track_alvar/CMakeLists.txt
@@ -156,14 +156,7 @@ if(BUILD_TESTING)
   find_package(ament_lint_auto REQUIRED)
   ament_lint_auto_find_test_dependencies()
 
-  find_package(ament_download)
-
-  ament_download(http://download.ros.org/data/ar_track_alvar/ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag
-    FILENAME ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag
-    DESTINATION ${CMAKE_INSTALL_PREFIX}/share/${PROJECT_NAME}
-    MD5 627aa0316bbfe4334e06023d7c2b4087
-  )
-
+
   set(_PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}")
   if(WIN32 AND CMAKE_BUILD_TYPE STREQUAL "Debug")
     set(PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE_DEBUG}")
@@ -184,31 +177,11 @@ if(BUILD_TESTING)
     PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}"
   )
 
-endif()
-
-
+  # Test Launch Files
+  add_launch_test("test/test_launch_files.py"
+    PYTHON_EXECUTABLE "${PYTHON_EXECUTABLE}"
+  )
 
-# if (BUILD_TESTING)
-#   find_package(catkin REQUIRED COMPONENTS roslaunch rostest)
-#   add_rostest(test/marker_arg_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag)
-#   add_rostest(test/marker_arg_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag)
-#   add_rostest(test/marker_param_config-basic.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag)
-#   add_rostest(test/marker_param_config-full.test DEPENDENCIES ${PROJECT_NAME}_4markers_tork.bag)
-
-#   catkin_download_test_data(
-#     ${PROJECT_NAME}_4markers_tork.bag
-#     http://download.ros.org/data/ar_track_alvar/ar_track_alvar_4markers_tork_2017-02-08-11-21-14.bag
-#     # Workaround the issue http://answers.ros.org/question/253787/accessing-data-downloaded-via-catkin_download_test_data/
-#     # by downloading into source folder.
-#     #DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/test
-#     DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/test
-#     MD5 627aa0316bbfe4334e06023d7c2b4087
-#   )
-#   foreach(LAUNCH_FILE ${LAUNCH_FILES})
-#     roslaunch_add_file_check(${LAUNCH_FILE} USE_TEST_DEPENDENCIES)
-#   endforeach()
-
-#   file(GLOB LAUNCH_FILES launch/*.launch test/*.test)
-# endif()
+endif()
 
 ament_package()
\ No newline at end of file
diff --git a/ar_track_alvar/launch/pr2_bundle.py b/ar_track_alvar/launch/pr2_bundle.py
index 190f352..57957b2 100644
--- a/ar_track_alvar/launch/pr2_bundle.py
+++ b/ar_track_alvar/launch/pr2_bundle.py
@@ -3,6 +3,7 @@
 from launch import LaunchDescription
 from launch_ros.actions import Node
 from ament_index_python.packages import get_package_share_directory
+import os
 
 # command line arguments
 marker_size=4.4
@@ -19,7 +20,8 @@ def generate_launch_description():
     return LaunchDescription([
         Node(
             package='ar_track_alvar',
-            node_executable='findMarkerBundles',
+            name='findMarkerBundles',
+            executable='findMarkerBundles',
             output='screen',
             remappings=[
                 ("camera_image", cam_image_topic),
diff --git a/ar_track_alvar/launch/pr2_bundle_no_kinect.py b/ar_track_alvar/launch/pr2_bundle_no_kinect.py
index 969d5c3..f4129b8 100644
--- a/ar_track_alvar/launch/pr2_bundle_no_kinect.py
+++ b/ar_track_alvar/launch/pr2_bundle_no_kinect.py
@@ -1,6 +1,7 @@
 from launch import LaunchDescription
 from launch_ros.actions import Node
 from ament_index_python.packages import get_package_share_directory
+import os
 
 # command line arguments
 marker_size=4.4
@@ -16,7 +17,8 @@ def generate_launch_description():
     return LaunchDescription([
         Node(
             package='ar_track_alvar',
-            node_executable='findMarkerBundlesNoKinect',
+            name='findMarkerBundlesNoKinect',
+            executable='findMarkerBundlesNoKinect',
             output='screen',
             remappings=[
                 ("camera_image", cam_image_topic),
diff --git a/ar_track_alvar/launch/pr2_indiv.py b/ar_track_alvar/launch/pr2_indiv.py
index 4c2c288..02b35eb 100644
--- a/ar_track_alvar/launch/pr2_indiv.py
+++ b/ar_track_alvar/launch/pr2_indiv.py
@@ -2,6 +2,7 @@
 
 from launch import LaunchDescription
 import launch_ros.actions
+import os
 
 # command line arguments
 marker_size=4.4
@@ -15,7 +16,7 @@ def generate_launch_description():
     return LaunchDescription([
         launch_ros.actions.Node(
             package='ar_track_alvar',
-            node_executable='individualMarkers',
+            executable='individualMarkers',
             output='screen',
             remappings=[
                 ("camera_image", cam_image_topic),
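
The node_executable-to-executable renames above track the launch_ros API: since ROS 2 Foxy, Node takes executable= and name=, and the node_-prefixed keywords were deprecated and later removed. For reference, a minimal well-formed Node action in the new style (topic and parameter values here are examples, not taken from any one launch file):

from launch import LaunchDescription
from launch_ros.actions import Node

def generate_launch_description():
    return LaunchDescription([
        Node(
            package='ar_track_alvar',
            executable='individualMarkersNoKinect',  # was node_executable
            name='marker_detect',                    # was node_name
            output='screen',
            remappings=[('camera_image', 'wide_stereo/left/image_color'),
                        ('camera_info', 'wide_stereo/left/camera_info')],
            parameters=[{'marker_size': 4.4}],
        ),
    ])
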
diff --git a/ar_track_alvar/launch/pr2_indiv_no_kinect.py b/ar_track_alvar/launch/pr2_indiv_no_kinect.py
index 2e43bc5..573862b 100644
--- a/ar_track_alvar/launch/pr2_indiv_no_kinect.py
+++ b/ar_track_alvar/launch/pr2_indiv_no_kinect.py
@@ -2,20 +2,21 @@
 
 from launch import LaunchDescription
 import launch_ros.actions
+import os
 
 # command line arguments
 marker_size=4.4
 max_new_marker_error=0.08
 max_track_error=0.2
-cam_image_topic="/wide_stereo/left/image_color"
-cam_info_topic="/wide_stereo/left/camera_info"
-output_frame="/torso_lift_link"
+cam_image_topic="wide_stereo/left/image_color"
+cam_info_topic="wide_stereo/left/camera_info"
+output_frame="torso_lift_link"
 
 def generate_launch_description():
     return LaunchDescription([
         launch_ros.actions.Node(
             package='ar_track_alvar',
-            node_executable='individualMarkersNoKinect',
+            executable='individualMarkersNoKinect',
             output='screen',
             remappings=[
                 ("camera_image", cam_image_topic),
@@ -23,8 +24,8 @@ def generate_launch_description():
             ],
             parameters=[
                 {"marker_size":marker_size},
-                {"max_new_marker_error":arg max_new_marker_error},
+                {"max_new_marker_error":max_new_marker_error},
                 {"max_track_error":max_track_error},
                 {"output_frame":output_frame}
-            ],
+            ],)
     ])
\ No newline at end of file
diff --git a/ar_track_alvar/launch/pr2_train.py b/ar_track_alvar/launch/pr2_train.py
index 4a22e72..eb5e240 100644
--- a/ar_track_alvar/launch/pr2_train.py
+++ b/ar_track_alvar/launch/pr2_train.py
@@ -2,6 +2,7 @@
 
 from launch import LaunchDescription
 from launch_ros.actions import Node
+import os
 
 # command line arguments
 nof_markers=8
@@ -16,12 +17,12 @@ def generate_launch_description():
     return LaunchDescription([
         Node(
             package='ar_track_alvar',
-            node_executable='trainMarkerBundle',
+            executable='trainMarkerBundle',
             output='screen',
             remappings=[
                 ("camera_image", cam_image_topic),
                 ("camera_info",cam_info_topic)
             ],
-            arguments=[nof_markers, marker_size ,arg max_new_marker_error, max_track_error ,cam_image_topic, cam_info_topic, output_frame]
+            arguments=[nof_markers, marker_size, max_new_marker_error, max_track_error, cam_image_topic, cam_info_topic, output_frame]
         ),
     ])
\ No newline at end of file
diff --git a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp
index 2903df1..be39441 100644
--- a/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp
+++ b/ar_track_alvar/nodes/IndividualMarkersNoKinect.cpp
@@ -110,7 +110,7 @@ class IndividualMarkersNoKinect : public rclcpp::Node
         tf_listener_ = std::make_shared<tf2_ros::TransformListener>(*tf2_);
         tf_broadcaster_ = std::make_shared<tf2_ros::TransformBroadcaster>(*this);
 
-        if(argc > 1)
+        if(argc > 2)
         {
             RCLCPP_WARN(this->get_logger(), "Command line arguments are deprecated. Consider using ROS parameters and remappings.");
 
diff --git a/ar_track_alvar/test/resources/alvar-marker-pose-test.bag b/ar_track_alvar/test/resources/alvar-marker-pose-test.bag
new file mode 100644
index 0000000000000000000000000000000000000000..2adfb441c77e2378c241d68eab1424169d69b098
GIT binary patch
literal 26774661
[binary bag data omitted]
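
Patch 22 also registers test/test_launch_files.py via add_launch_test; its 78 lines are not shown in this excerpt. A minimal launch_testing skeleton of the kind such a file typically contains, assuming the standard launch_testing API — the node under test and the test names here are illustrative:

import unittest
import launch
import launch_ros.actions
import launch_testing.actions

def generate_test_description():
    marker_node = launch_ros.actions.Node(
        package='ar_track_alvar',
        executable='individualMarkersNoKinect',
        output='screen',
    )
    return launch.LaunchDescription([
        marker_node,
        # Hand control over to the test cases once the processes are up.
        launch_testing.actions.ReadyToTest(),
    ]), {'marker_node': marker_node}

class TestMarkerNodeStarts(unittest.TestCase):
    def test_process_starts(self, proc_info, marker_node):
        # launch_testing injects proc_info; wait for the node to come up.
        proc_info.assertWaitForStartup(process=marker_node, timeout=10)
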
z;EICCpV)7yHoQ%6^TFR*QP}u<>{r#trJu8-Z2@%To>PQ|)dqs`0*iFG$vCDp{;*x^ z+a0Mj%Z66#Ck?eSG$*7=X28fdhsEMSE_B9AYq}BJmnI`p3p?c8|AcBtuJ(EJjJc)C z*w_#z{>{#M{zSz=2@t;vkCesp;r!*@DecSq^Op}^on#gVA)I)&XV=aly14p&b>pDu zi`%+-efapz?RGW6 zsFjCQ+0wP71`nDOHU{P%MO+KYu}5(g?nL`bH~z&cWooRz#Z>OH011+KC34(B7d|AOYdQ*Tx%eMJK*443*01nEx#Sz z6{1=&68>)9=cqT*3gR+W02t@9_B7Od123v;H2j&6cMR8oyGLEUPCOFm#h zs-9dcdfay8`DPpV_Elo9(;M@H=yE(A|GvYwrc!jXlfc<+t9L>hSnr+hj}NOIIgCJ# zG@e);os3>V{Ymh51v}a2t6T=D-$%Y+i;xm``?V`Qx0LN@~WU*%bR^n?p>5wqK=s-VM5C5I5 zs|6Cz*{IH}EK_tIgR^Ka$ScL(4rMJ?XuZpg0le>z{^nXS9M{;(jz>uOY#qUg;Obg( z36tLNSGa6v@it$WYlhB1q<0cGRX(`kuY90G1BFvYsKGMtNVtV{HtBeMC2b*6#qtCu zOFwv2$?+){rozUJO?k5Prm5Pq3qGq!uP`O4L)Md*${s>YbqiDY-I|A7+GRqSQHp>~ zDX@dUSx>DUdRFH-=-_j~ZjhJsw2TU^KcYHssW&CU>-O&C%Xc{QZc|W>~RC=kYW6d!yKCM!0yctS(Z+G|WKc+MGem&6+)&^bnDV zt$6saqcvXN-_e4_AHScN9lOnnB--)BOmcC?zbfk7>xZi=<^prA9K3uvs`74+Dx3E4 zbq|(2i7F8gLxb z7gh_l7JqY}?1R6PA#1C$Zc_cI2|d|ak0Y^Q;V_KbZfL!nu_yS5KOfz6Em&)`-?@Z7l?6m#eow+{z57d_9@l0oUa6%-fh>5 z@M?!xuX=Mjcap7;{W+7cE3e&vEgIP}u~K$FO+`x8&={^-{_6-U$(C58&7DUBx16br zss!+vOEMrFazYLUIss5!m$Dlxw-@X5Lh$_7zhVst{KysvxYd#=9GU4{q_0;GQ+!hR zYf-5!$MqEv>{N%YP8jQD?a$HYXRJo=8ZYgV48_f{>Ue(T90rp53@JFosDL%PuG`Tn zS{{3}&V@-@T#~2rIx`V49bC3WcfqT6z+wa9N3TaOzyeQ4QbpQeD+9H$2`4%c+=q9M z%rPU-)zymfIbRhmST$I%NKO#g4NA(GEJ{laz!i%{6(gIis+0SXnZIN2^0V-kux<&F z-ENg+)YS>qzwozuOa3pRqz4|zt5Q_&&V;$D4F&SPd{BvubI=wmF}_;t#%#gIjTeHpdxl^jqO6&z^ zwM`G8cKS(j0m>(HO^7K4ctNlOt}tFz2X+iq+_)m$i4BWA3e4q9wz_TY@I>1B65>60z|SL$d6cKweT()MPm=}#9l1fWfqKwG47;JC0g z@)_wZfG~WhWg^6^?Y!WiOm)=VYn*PgR^VEmAUpq3Dh(?c34cwHgLIk^YPx#v&A2ev zN?vev+7+TNte9T=ZOapBeto5@u4NqS^b$4Nkpf@@Pn`4MsdFm%r(SPd zpyTU}n@%>864aecCA}*>jxI4mQdGrqa__&tig25M*8W`jM;k{~7$7bLyDMEW9&kDV zlo$RsKQ!yp5n-Fa+SgS$Jv(N;Q#nWY+X~E!9fQ20#)=8q!B-GGJlc5ObsSEwp43=V zDcspV|6m`@YMqr?3rRD@;2V22{FiNyOpUA{FZk>2K7Ut~l-F6K!dzh2 z9^SJHHR1&7-o;BBxX}@l-*VLhPkcRnaigMm^2(`V`t`#67rrc9%)h zJk678j(DyS=0SC9mA;&&S|1_{Md4bc?IeWMQAU6FB%1*WSrO#S2-sIxF4R_mZ@Ah@J#us%py zpN_52SaoJDR4LVU7x?NC`rE4?tH`s{i}F!xpD7yI)HQWHvY?Wdb+A5wO<4M8&vv>p*Y_J~@~}z-=F{C)!@)srdTj_ZiRKd>e=l=B^dxgues6ho$R~#(NUPQ`7Vjnuf4Tk6nYzOdAC)uiqwj~H7t*czv8 z{kB?oMeXIoOPbeV8Bhkw{lJy4VABB6bX?u&(4zgoT?4g1)! 
z4@w{$S=89t?;iWN)oQaDDvGEl;3kO1KE|X*8Y}=k-Vr&?+?P^Hq>4Ci05RcG!C(9` z^pA{Ut9k03LUY00Fc%{>@=YbMVr@=jRAQxww!%oRt(?3QN6oryc8$TKzu!Opcz9Ct z&Ydd3iWfTsr7SxL3&TxW$Om$VL0$qZiLu0PfDuFEM?=dX6_b(FSE^5-;+A7>mRk@C zrWU7v!ud=IK_!oQ#;rz|h7c=#D1_KMRBOBtQs2r)>=x8^s;}E8tv9sC=dThR8+$E> z)eV4p1>d)XU&A6&iG3Uqr8OE_=Uj^Zz_mYq$fAKew1R+)5uBHjaYkl8mgH}^yA zBHL&nx;?!d4SrjY@@8~J>*p$Epz_r3rhJDggRv@8{XPf3@Yn+_kUikzSkd5@y~5G> zVU(arCH%&WO!p3%L4{+TrV+c~27W4DnIJ|-Gfs?yp$R0p+t6|U7Qni`M)H#%f=hFXkM!v8KjFGhe3nJu$;w#ciaZLs)&zZObvO##+U<<-gk@@;Vh zsaT#HoZX$j>@V;IUbWq6@*uAu_=WSy1rF3&aatcG7W9#SP!_XgALbtie>F%vPU2hq z>*90T%U>7G>ebHA^9EFNCi)wrSEDE>zvFMT#K@v`4x?9NmGiar2%nnvnpx}*CK_sy zUE0Rld_YOK05YV%s;$`Z&;3^J<7g{-TVDly zUCU_Mw=J)z=-cwVdXPgdupo8)mP>5gM7e~0gS^J??WdQdw)j{Lf)+zi+^4`{V-OB z7tb~~?3rT19`moZ!sy(jW2n@#jpwOG^*ME@>IsT+n;+VAtX50WZc|c(q%~)n@p?5q znl6D6+dB;hSxZq|pmf+KEZe13 z8&-|kfty~&9~n0oKk%}SeC(M~KB+&6E(&lkN^3H)7Xb7di+e!+n zx|A35jaQVr5>)BcN!!d6(83d4dty+ycBJ)jp6&>OC53jWqWjl_BY$AZ`gU({lV!Ew zE6&#zuv;JZA0F=;Jtr9TDO8uGJcF})=6GZ*-AW9))pqS3K;D|oovz?<9m$>C2IEA!}3|5*}@X4yzfZbv5D=Jr#*T-?NN5<0QNx1addJQTo3@3SGv^?|lZI(Dd7cG$rG^GJ?Lf9Wlr)z6SdH4$%bFddqTFXP zwx`Bstj(IpDv2Wt@XNW}uUGc}WV{`MhEFI4;|H=W<=O6uwQfWjT>O!4#c_@E`#b16hiQW)g zb7#F7WYl_6?Wn8$_Ovfi;6Tl{ch7%ZtDvP9Y^oQ`GK86^Nt#?g*KQrpLZTFKjK(_3 zEU3UwOsv9*ZRd=Q3Z?#M=J>>fs4Hw-ozcnD*UYjxH&SR}V(wY@FZJG*e3XmN;$w zT%JLi_SKfG27Gn(9@*-^Xk@AhD77tJ9efP=wGJr(Lh_NTF~|PApQ-3&0oh(F!{HE< zbvsS4q9|QQ-y3x2E1>= z-_#0l8nRH{*6E^lJxzh&bx=D#PULfWD@P`3I)12@x)Ldta2OGxeb4JbSXrmL@&a17 zPqyM+>^8|w6^>(HaB_r;zYbj2&|*0TyANW=K#u}^4bXz-Fl88VOLh!``zjq^8H&VT z9Y^Uey^#^f3}r@SYS`e|5Jezxc1i4*tG=UM=Kk z`J)f9&Ep#IDhaa)pjQB-kx#_z0MQ0?u)V0LMe(acXgDl6v%=rw=Aq7+Lu|oc=L+o} zWbk*{nY|NRGqXCe3I4|Z98;2zE`WQiOUNY7 z+(GZ%e+yP}C%vE3haacTbvm>Q(|OIi)d~He)~Yv&7qoruS=`*jKe_?43`VSTSRPTC z!m6od6wX@4l4G$X*oJ^d*v@*3$JCK-4JFI)`fiwCnL#rbjCX@p^?SxjX*-gRXEwQ< zEEO08WWis~wAGjS1!^-lH5XiFN(N+Uwn31w3Fl6NBmV`3x45tUeweKwZ|%lGc4jUy z3{fDhLt2eYKkdK%*`jcaz1G;dXU$OFjHH})^7CD9@v~2FenrEY`ji81YKi9{S~Bx?O^C3`x39v?<*sK^7@s;wdAUH`dd~cJd&9# z^D3|RzqktWcIH0LLEF9-djfwgoF~S0NcX3yv*DT;jXmD*xAiAawL0)XunYC`nY4a5Yr@AOE#ow?Ia;`npaR&Q}8qP zXZq1^4-gLuXC(!#_G>3^BD);YoP;BPywMUxk9=IOMYBm3OS?W5SFF0n$@6T z)W8V*FB;E*A&=PKQJB0}tA!p&jt2%w2YC-!wBQ#bjcu{4j1^n)jmy6{5c>KXP032h zSIW<=E{bW&i;6A!=h>qWuvnIOY5cj^cm-0@MeC_qHy<6qP`GX+oe|qW`CNEyoBrU$4a3U}W++@oPn%QKc%Kp)u z8m|o`G;oust&f@)G9_i+i6AT6=;m)x&OI!Ot6-%WbFR*BJe4P`f-&-2eAC)Dq0VvU zi1c1fB^ip%&J@_coVMYc#lBd>R!CKx)a^%I@lc>D?9U2+>fh#IkQ%0(&?%(?sRuHi zyl9{n5NuXg;aGk0ehi)c%I5&G+R1B~n5eP;*X*xb9lGkBe2Z6CfG&&T^n4`NbWj(~ zvsh|L_mI&;dWffRo@HWhtP-p{QUCpClhf4k5Ik*U?DgH(Yp+n2a?mzi4)-xp5>ji6 zg%cLcN@rx|HvTG63WLJ-k}UkC-juoU=G9eDmKv)Kg0onJ)^yMgOcp$yRU1-A%jE^U zO1PRJSgW@T{x%BQ{4{>yU0!M5ggSO<@V7SP-(oDi1h8Ye>@OY!w=s&wov-_h!ffmY zd9im9G1Z{uL{D)XgbnNxacV3#99K>Fc z;q03m>oO#686QOb2#Mt6Kg~o41rFiWZ~|usi@REfQ8;k|QS$`5iC$pzW^hV4WZ)42 zcv#q|xjFfEmptPbO(rbyQFwI=j`nSh?aL@!Jv>?Y^+`o8As=AJV-0>06$O9!R$y~j4s`X$JA-pyYlNg z_mba&U@v|6=2|tSwc??wX^DDRm?R5B6IqY^a_MH})~$vn?VypuNwu#jaD=%NtShQZ z>=2w6TtManyTTL&K7lOEqdD%`v7_hk&{(q@SmhbfITEk&?t|tk+NvLNnm}O?b7od7 z+eex}>uKa>TE9Zw+SPlj_3l#-YKuMNDxjOBklbf7L_^ZE{b7^)NR&$n9bhi_tKI-#C*dwSoSmHD%GHl=l+e^~7gAdQMiMe}k!Hvf=ILbV&O$9`FQD*> zp_)XeR?RA&;(JQHKJXX4y{H`nmsN4^)aM(0K9SO`^5=?Jo%Po)o%ee7P3_zH&()+) zrcR3^_pm%ud1Gxg_LRjSNn>s*&astxEOSlu z{5Wlgx6aU=I1><%flWVbbv|FIe_?KgfE`2bfv-5uR#u>ZEE2{bxb%FvfU!J_x4NZL zWi8c580z1;OxC?Zt&-I{0b1%!)Ho((Bej+#QVMW;qt9;f8hf3E#xvIy<@p-{RjZoj zqo(1+Cu^X3TuDJC+LrWKe5v)QcGcYjhaEif5qK_$Hr&?yPUBMpOvA>5N60dnIEHUI zcByuyP-n$iU4B#(z4bnMMq^;X%p!PNiXy4y;_z7&i&F}-wsM`5A?4*1FXG6SA`xFUNZs>`{{P*I zQ1|4@B+}g3LIJ(lJqX1WO&jl&<)o?owQ=}c 
zjBybyoQ}-d*V`Xz;BVuoLA~@qmXm({WLn6$13(Zr2<)D;#S;tnN!8 z@f`64iN96b)pK7?YK$nySz%q&n=;_)5%8)9@xi|JL+zA(BqPEyfbk4%)qHU^V4+<0K<1NSw zhlLdiA64PXMXt1Z;E9T9Og)X$izm0M$CFF4dLL6;gROc{c`JSe+79yKqV<}9RCOK} z3^n$*IF|*ximTM@$PW{#b)3_r9@Ju>uFE(}EhHB>8A;{ETRa%PY1RTJJ4qtK_?tyk zbu9@K81djQ!X%pfY4G>t>(|p)3(hm5G zSqyJ-NN%IEq!FnfP5gtVbpAEd|L4~Wx0mxDFX!K!zJ2vgO+!~v^e;NDcKQ9w#W(C5 zQ&oUs;2w&M2YguC3V*@k$ze~FLc*x=v&I(t~k@{=5*GoprE2;@GvIjk5F# z>F_Jnhc!`}nmGIrEM9%;U5Hz8feR`RlU$Sq=9-F9Lp{>eTDY3}8MP8i9 zwVvEkm7><;F2zeFs~v^2-)~iXL+$eQR9V1bzR}H=4^$2mY}8;J!_cf?)4t z5Z3FlC&a}+!ie=?3s%ZY0>X~twfwhGs|Uod%Kdbk+LBpixSB(7a5My~3e&*cC=XM< zHPgp)M}VkS#=Z5gEB5huG~a-WeQ_MUWoZIA(^_lF$Tr51p0v4e478iulFk z3)U)!T5H(L4Zpo|q_lD{t=KC#QR6;>AJ!EY_-ei@gbCrFM{tbMwCL^k!Et9tU&@@K*#UhKp3w3WyI< znEWOKIbQRZNsVg2>$-p|J7S5bW9-u4<(Q zOtbHgLh!L&YrX8yXBXv|{X5RrajrWtU56YcVPYuogoI5z_TaB0rCEu4VLD3;YioEt z^Xh?_N6#*Yb@HMHheZ$9Icf_TeK}SQ4$JQR9gnc#a$~ifmGuRC6(lu0Q-M_TS)?LT z3yIO|W=7M)pt-$THB%(!6F(GT0D{gKnGgRLm&-wynzTYi-1`>CMdTG$vZBT?-vb1b z;aez)GEZDIxf0=w$cnX9W`TGmkR*6zM{_Xxyk{ljHp4H9z3U;HY2sng=v{bS8S+se zD@$5(dA3>^J(AVms8ctY6h>iCTiwQuYc7m68zu|>hUA()>pmx*$~)xz_BL3II2LJe znd>eawOv9b_*VY)bb8^{Nr>G&H)+LtqKc6UvXUD`4NbC9!KuLr{(e0_S9`rU`+9zw zV&K={NbR(-%@%aR9EmkzoOFzV#US0Fl?NC#B(u}~d<(jXGrB3*I3CrK$~n2#X=+E4 z5CnH_u5mo7y;d-;dANnb14_lSOR&I*rm~*WnNRU%BMFs@&$kV=d~E|SHPOa0sT~X94C%yBQ7JvKQgAeh_dtUY3T+n5t&R!~hAGS1EZ`W#^aww~ zl$=Q}yr$Ryu1`z*mBYdxL1B0<`^DDaxRe3w17*;)1{-TIx9W-|p~7yr-|uIR7-2!B z&ic`=b%KAWe&X}pw{zz_X)$^zXXVCNnFNsrM>X)hIb5lICmDir?iV|bVIqj%(jDdk{>)3OSe?AT0w0f}XT#cL~wqniZ&MxZi68&zmF1oAn`4)c( zkhc5JRbRxjR46sgX^B*xRGrC`nhf(51wOFg0~e+iWx+Sh7f5~haJSmy$w#%_f-S(+ zX>7%{3AGC8mI83p_)0{0xtlowW_`2`M`nEoae3jdzIFQ2;IHS=$!CM_=EKFBJ&ZUx z`-$VK-kY@>io(&NkI}Ph9_;|CeflrA4I{=@Ruqosmv76aW7ejV@|2t-GD0+_4%QYO zKf>ry_z{fA#ottZ#0>?21dD-IG)PB;S91Hs6=27uT zxUq(b0#9x@tjBi=Ip4xVp5X- zx5|Q7kd-Pne|i7m<->_$u3SA5JM)$?#|i~r0PFnncqNXY8-Ox+>ZITt*=@7%(^{K> zPR`$cYlCgfw%*}5UgjXL(2b#}9WV~OTbvi8uAIojsbkR%4({i#o)0^@6SRnO-id-1 z1PcL~KEG`pII+yydRQ=bt+K5hej+`2e-*YP)yjSJ_RvG3e(ZN1dwBKd7R-tLD$XO~ zN#1KUhg&?tcYoe^iQ)IRl4DP&_s?e!ps;l2SfxDg)>Qg!M`O|^&B$$vXtrQ!>pIn8sIR^h=H%T5W)?xuFtR zYBDTqEBw_Di?iXsia1^EIHrBCeC2gJiMZ8`I(ToU}vLY#w-#zdQlOM#zPw3VnX zvC3rca(-H*=s*Zp^$tRr-iEhV|)WsYrdeI;l{ zTL{6ihfXgY)`DJzvXfmv@UQsD>3<;Pl-wL_rhpHT93$~C{+kwS;T6Lj`%ZitIaCuIfi;kUk zjaYcJpxiBx&E>wGsDf78+<-eNyw+;u8dK6pHM|FZnO}uYPL=_@@zoO{_6vrhuTG3s zR+q?o>$Q$%uIK6SUq`7mT@fF&IQXFk4*3hg<-QH)F^Pc43Vr)O??3+Y$kBnKy$KQzLFTf-KEf=O%Mr_E8YA}oQ z2fqrwDVc?8nj@)gCDdNdUw8ZQ=f1)hvxc5g7!)q0Kx-6o4|QAGjXNOhY>mR}o^jV| zOZ7$YI-q>Smq*8NC>NezT~V3?u^e2YQXPQr#dN0o(L3C)JNIL>1+O(vGgTcA zDN$gf8FO|POVwX<+;-nVCUxRbK}p?}cHkF#|CNVh$`%mo9B;`$i3O?iG$jf@%QIG&3q8Z3_6 zcdK;+a&cEYI;r$!7c(vRD=U`59S7w>RpYOoQ4kCt z2d|rzu*HeZo%mOhImu4Cc>L$-{U0X}f1W~bU^m03f?V`6tXZT(gs1+42J$9Lq_SNa(6{ecyTZ>i~#pedKfG@;I! 
z#aw1YG~_|C16-A1NVht`6?Zn}DOvFYPGG@m)zh%uQg2<%YCQ!=Z%JsJr zPes`)*VCBzXXgzib;u4Qoi7^uJN|N&FkGhtgTJ0ihVo=0pWS>pyFq(rt`+5DwI;Hf zC@kHNPBLvyz0MhIYHBwrpo!+@kE9Q6Vo0o3zmzYkkHfSaip51q?1br*v>4jt*wjnUbv*T-Wb}rm1>nE zqZWh%_QZTI6|PU*{>!Q225)3xR+~8IB=!b>8Bs;jjinAcJDqlqQ#$epgW$1>G{5*# z>Rgr=R4UEIN0onMRPWfQQ5yY_$OLPHU>{axq1$1QW5M3Ff?O*iqLES9sOvK3{2;zw zj!uKcBWHBcnIMCYxs&55U>-VtpD}~moSxXlY6yi;hbuTuQj75cm zX1za0D@|8srs}7p*l)l`Dl3o!80vGp5~EVZ7UO9kDQ~R4@4Q%BgK2}F_Qgv|=rsy< z_r}fpF~e3?4v5@vS`O9KdM}^VWG%G8aM+3@q(q8ly^UQlp4dfLA%_E?IWML za5T#j75VMzCTaLt3qEs@GX=Xw-uP-2)hr4uNEHN62y|BdJw1CmRbUD1HFYB=Qz`yg z#q+D4hWRONS*k@v9gejabWj*}j8<7ROhwi*Rvt77JL=Tzt797K+X~mHFC`T@}|6p>CRP?06qb*FMMjQ))%DG}!g_ z`A2Mpqm2|wwo(63FM*`S2?FJ!?S{YDLlz2qcE>sdGi?lf@)PkX;G5ultYpG&jU3Q~ z(FLZk&-us-2CYVZO;Vm9w!5NRGDn1-eEen0%$j**d4e`O-c{Rpi>--z@<+EsUU^{= zBd;Jv&8OLEYwxg0chDT%*TJ+y73i^48z^NP2Z9r5nj6TBMxG#B28AsUz;vF7OKXin zx67|8=SU-_LK=>yp5(lT*@ycqY>)G;csJK{j)h1ZgaU=1a<4 zFXWx~nRyB(8g|_x5Q3FKOso0|`pW#pqRz_5pz!C5OEvC|ht(m)KqJ?}AO5!(lq0OGa`Q#5VdjWNxaUj8`NS!`BmvOX zDnK#tS1CmY{3XDt5m~iGYmgI*sGWgAW2IVSu`(kBokq{#BX4-4=N2xh$>LUN zpk;Z@CG*kx`OEp`7cFgedX;90q2CIXz+e2fT`Q27s}G6;R4EMe-$p4X2)R5|rr_1y zJqiUc<@xeZ2(mZJKMqxIyZXxOvde-GVAbbP`>Q=kj!f|R0FS>Q(&%Sh)&iN zPScJ8n|<1nZ`whOjC9hvzzK?08iIGqufg$3!r=v$qVW(_fA(U6ZD`%a*~X|Os zu#4ivc69g`RocM{P2?3@q3|!WxHwCgW!G&dswxXBB%XOt*jAAQTy+%&P=iQ34KP<% zxO^6}2xX$<*%yqBMcLw_%#3`C4%cZh#0!fP>?b>ei6UH;+J;!JKW2C3=|6 zsPj(!l0n7`E#)9ze=6Ets_4Z1QHC!Fn60LBh-&lTtelfYt0js?}A#&$as2t zare)&+kc!YE}GD2r3@2=QZ&zNAp(Sa1h!7JPgsR^z=GmJP;U^#2)F|l`DlASDb~EH zg0)l&>j?s7^a>$qW(U%GNisM0fb(Luq zn^$JWkvv`k}#v+Y7spXwxTei7atq zC!fFg1ghc98onxxrQdp;!LI!g7@c@{(1gM4{Y1;Z-aMpze7#X6d<-wxx0_V~MfG%u zKLxFIXT%^TTeSC9+rYX@1XOD>e_`>0!}2Geos#to~kw*>}8bD8Qll^^LJgpiD@@gbwwH|LTu2_@k9e)RTA2bqvUWgdF zN^_dmO=wn8GNC3zR)Jo3E;6Yl2rsVGUau~{T?>MZzm22DcZ}6{PR>sPaJA}~PNFw@ z$L?2KhKpHS7SfJl3w8y9vmcfk)RR6Cl|YnpHOF`L9UeXSuY6orQR8_B(+`|?*SFb6 z$*CL|GgNF%sew~X!%V-r6W0FNAH-a=K%@-OV!@C9Jiq(DKivJ#xpI`0AZ%fHW<6j3 z{t^Nx&kKL2TmvO54IDv);04@pqMNGv)(l$+E09g~NTXYE=exYF!cZaUQX`{D8h@AB zNukI^mjAHYMhM5>`uySi>HXRB`?rs8|9pP?huWt%Z{7YpdH&Uz{qI;Nw5vJ-#{7GT)!xzI%T2R_*hv7a2PIzU-ZD@a@a1xS28#+YqO3BaB<_Z)%n88^?*F4F4yb25SDyuS86hY zi;RQ3a^R{t(bA4Kn7gxgX<&M{=l29qWp>!wyyTd6G+zX=Sf`)Qp47$6TTB(0hq??(vQJuE zBkWC>Tp-9u1b_hC zOik_bN~JFD8LKZ-$mEx4CZU^W9Y=DuI%g^ttAe$ciC1njxKrp8@FHPJU}rz`gug#8 zv~-J6USmk{5Ob_?dScKSa%4{>9kj}VlpF=P<7b{Fj?YsB^o7jlBs06)q-=Yc!P@@0 z{!>G1uBkyTqm_@<4w9=o*8periBHQ~ftPm*I`@s)pL5d&Pt7-Z^^c1Zl@!>*2H@=p@nr9tFYcr?U+RuDi;?_zL1ojx}3O zr!!F!HJmIm?%Tkt96LwKffZ4uD0qz6&!8|N9@)JTK*eo^k8<@S`lDab=c#uTt3!!f zGPmUMQ&r~P`s%~!<@>X%59e2(1i_V$imKt>qzzPNLJvCnWs#Kx;ut_p!L%K7vdZ)% zUyTk6ADr<{`x6MpQ(d(N4!CrbXUXJe{$o{Y{#%4oDQw6N1s3r26>YDUyu?L3F4Usk zM^-+SQBt%$z4iQE^bb~tQ?LxX-!uNy+62AtPpGIW{GGCkr8+PqbW6h;=awwCtVbL{ zj&gHl5D$|rmmXWRAHDuWQWWZ)@mG{tFNibf&i%?N7GwPnk7K1FYWW`y_rDzM-R~XV zeLhe=!0pGAyALOqf9!96|8V){)78888n++zG;ZGSU%%T^!rtBf8S5H|$qHCSki(G` zyjaQDHgo*EmG?}XR6}T(1S&VJ>S%AWtQzkS~+R9#5>Cn^*Yvi|-FFG1S=UlR&!!_U$UHJ?Cs{Tls z&rY5Me?d~sG+UL4Id(Rk*H(}dQd|7Sz0HbODJav7(!k&HD+=rL0n^PB>AL1zXuLgf zz}gsYcpA*twyW;S@Cv9UE{qf-H66*aG?Ber_?xA8K|?&xtN_@NT*q?_j?3`!L*K9N zHMFCm-O7p5rMsph5G;fLy9ODa@L%kIi~$+*y4#raBQ@GrAs3o^=Kcv}apE0s%@5#frlZTh1M~`Eb0=Wm5#>|WBCsd|KcxyUU&iHH7&cm3_zUEjvsH>A3 z%+$13rwU)TS=(7pq+o>_-&dTsD}Vi`WWmj2BZCwEBFppB>nB$B$&gug47=*Ddw%1c z@_U2P(^E~%o#!ylrq{t6!S=>-fU9a7p)QX!F05g(>pWR9W21#uH4QWeZaxWx&$l1X zFFvg<_ctmKM%Q7(cN8hz+Ue6?>owRUMmArkOe9<~>|5q)Id9{0hCj--#MTWE8^KHN zE)uAXPZ)$%h0$($m1jN>yr6Kg;N?6zn{ID8?E;9yz^)yvf`PyG-&T=D7-)NjA-37l zrth0mpwEA32#CX81=DJWyZDU+5%`+Yg(&k6U5!{pYji 
z!&6lnemy>VIy}&LK016pK2hho=d;zzY9muwJ1X{9a~`Vx?Tk{B*kozW+kvyfV$G;A zD&HV^RvZJK>YhhWjNwlXKs#Pc2aJmFCa;NOq?$yTIZyL?*EG#%F7J2OI_bZK!n^C6 z=mzlrl*Q7;!rY$ub$rgVKME;E`oL4M;K1&U#Akf|f8Ciq#=J+iuwcoc4&Wqs-sV-| z$abq(aAcqSNxET>aZ1FNSxwb8^M_2TrpDyyIdVbHbAp40W)0cB>;$RZr=>=+sXN=s zwks@{E!J{x)1@l$AYraY*VYZY6uAb)Cg>~}B}zNI#8M#OFWFX#?Rt0-qO@6-gF4>k ziVKIOmd`o9Yif>Q!DSC;We0Q)v9RHJg9uAs^(RibtV+yD7jfqp-rCpGkP#x0Z1U?G z0qJnriZCD{ukcrOz5{vD^J6O&jVw2&EDUb;!8WYmHC+&7DF1{ztsX-OIVWFmKP zUhuBnO_oID4l(RGT-~ITN>N!@`XX{s>@pEsqP={JInQ+kd4j)5WSlagQB^-d)rk%_ zBMxeVdc`Ppo`hMsZN8#^xj^alphnVsp+~V=ee2cN^@;8+W6?#;`n4EJ5tHUAUI|Wt zwo;^NT0XM$nRPHYiMIPqDZcP|b)#3Q7y4nVarOS<^4(Tw`{ncHY8?|iBRNznk3IEMcO^viGKY+JY(_v6%MIimNkG=!cEBdOdh z2Sr+ijDkXN6Vwx)jv*Fo`kX4HZiV4$BRst`{sP>bjoeYw;gL$MtC$X(I2)Q>YwWd< zLMtkt3RzY{MMjn&Ow}+q!?ieuEhvmCsAzAF_sQE?UD}`|(O$J4*v6gwY^1J##d8&1 zwA*=dh%@R4UO-l7y^g#Kv0ZDh=q~h_CX5fuJCbM2#95OFEoazkg|Gky|>ay~e@tJhyTlal`Bs@)pB2FJoRc z<}QfLEGod7{L+CTsAcG?yCSm^klQ0O8ij2kv~^efHwWbSTG7vI0mHuaFB3Z5r?8dI z0Lkma(!;t_zW28i&oOh|C5v8`JxFj+$YwZU2>W8lC<(%!V_p-=QgVux|lmDvs7iP_wIh#(% zkh(xWNcl*`$(qveZClf3=io}7LpoJnygR@8u$C`cjA`UW+g6DV5%N}1zv}wK%PO}g zRi)U&h@lX$UO|z;>az~UQ`#nO*1NA7Te>$Bjm>$O=O9<%cyPAS z%@_yc?*7%LtBm1A7oGyFy7M+;)T7Tjs!%von){!zwa$Gz#kZ4rwvaFy$pr zDPX-=Ls&b6JC6jd| zXr%(^!9}>7tuiysWKCuT>*ccMJPu%SWMO^QC2VK)m&BTVq?%{Cg{BIfQtp%lEvI>4 zhd>6gf{<2KooF}-=Wso2Mg6O>3_SI==Yy>^-ZhfdQ|jknt0o7y;!*=( zs^3_BIhKxL*yNnK&)_F1E#W`s-X`36;cQSiyq>FbMQn_rucT7ihBqO^R;l36NA#JOaVJeSl|E+WU2U0NB2Rk`3JKQYb_xy8$lnuWK9bP$|DjVwP_QR3VrEcDxT)#aQ&(*m7bba^v z=6>&1LpYTj1_x8HU1tln{Km&;S{vp0c%^{_$UI~g#f637X!0ypUuy|7xW)*_p=#~F ztOE$cg>{0bBBu-TPM+b;A`Z`;)=MHU8gwocu6|ZYkWQr%SPTloSak^x`b-{5<9Pvy zIG=;U{FjR0__Fj{ip<(-HU84~qmkW98sTRs>pihKYy*CsE+<`z-t$gm-{4-zn{r@q zHd$Nx9BW)VI?p%^#yaC~#pBQb;Ry8>7_-j0ssvLAc@h=(D z#b4qx9L!gOFfy#nWXa&|tY%G7=J~K)Es{a!A=f&FnU6wX{txM}@NOTADsJMw7M;Xb zc3wSiE!4$=-OEBl@cPg7R}JmxPXp!a-ph7b!);ZXr15jDTKF47a0-Ego_4!&!ipnL z_{(>-%?_Zi%Q;w_rkVzE{wu&u7LyhTD>h9%vi_^bg`U?NkzyZ?DY>T#e3Jo49-t6} z*%F~E_9TI7`SOnbrMZMug1LJ2mDH!5Is>w~Z24R`D<8649Ar2{MYZJ&I!O$$SYmM$ z*9_cQ(bAro2HzlWxTo$UCSSVr;Co?R=@TKZ^5@kA#w|58#zhT1PJ1qo8M#&9uYYD6 zlim4EsT7{up5c?`DQ%M;R{9Fhhldk$#ZREL+_By1bwzA|SRMG)B`JN{9pA#xlj6ln zn$ng}+7_xOFuuzUsLe0sNZnYp%8ot}5SI^427{DF5_P2CuJ+U>`eOZl#qx!MU^xKA zCUgeYdAUD%_;ULAaQ37z5`?tV?9XU8#=syXv@KiXi{`xMzhSg9wVG-Aw6NF4Q}=Zw zE>qtMzt+jeg@l6{&Qk6bivRp}KE`)1SWNL$V$k5}1ilX4dco5l?1gUR-H8t80Txp8Tk^#G1h|K&*2~A$T4mj0RbJCp z3)Anf70i`^q@LcZcW0-+9j^X3Q69Te>Fz$R?>}wGgMzgQJM^x{T>6(ga6pVPu(E?0SvWMlHQC*C?>nuk9C7q(kltF_i_ zmH8aD!gnCYC)0`k*T79K(@63HSG9%$#Wj86KKm8=yc{Y_BqyXd5@E&OocDeVWA<(VA_J4 zU@lnCmQOev$JCKQd}^SujRV2e+p2CUiCz_JPP=KI1G-(7m@xPH{fY7}lyYO|Pa&6z zO{{x08aM__sJoN|O|WPOKKR1?=_4J?)%h`s^4=F7*VE4!VaKo8u5+rtoozi_Y9z*; zEbHVacf{Z2tBuI1ZVUdp1!Cv&H4cJ4@qopz>*BSfhQ@Ck&6#|D5}fR~a}e2!{h?a~ z$GZQzw>Y%5FYTlcGx8jAb+RCO{zs15rxc{NRX~n}h3KT*%WaMLK0)5%*I}%AT+ZLt zXiLVSWIbV>b3WdVrbA(+?#(`G-DG`t$J(i}6_SZ5_1QVDj#*uX7AW4a8T#4H(fJk2hC2;vV z6$+Tv_7(mD#6n@!AZUO5mmO`TG|U<4+Od&)`DFs%KVA0Q_{&$|p5SI7Z=BM~XExNP zqeZ|`a9PDx0qeRka?fPOD`k2VKI^tekhg3J7gf2sS!=5D$t%vA3xU7(A!x*L?X@sU z;v$;@8WA^vk(QDa>~7zk3vIV=PbAS-??iq@fzRAZMRv6!xehU4B-@I{XHv5$5eImg zC*Fx%EB4Q51cF@u;Wf@Qc=m$%_u3MN(kIwGD!Eq*SO4jN`R$HT;?<4DzZ>(ghJy&PGLhkDeaKNchR+uKaH z$>jSHb14W0k=2%2%k{=TY!R~q#~vFjRc19aU)bdT@~v=W&F6e^G00H@v%M<38{S$V z62=Y233`Ia>0Xs6a52{MNYC9ojN_b*9gwu}^ujcG3_i@C$>faXFwQJm-Mu)>e>DWM zY$4@B+fL1xoyzR^0Hg4!ap^&!J?UF7PA0Qz)SpxEz z$-u3b7 z(SewP;1a8mN)enLKJ8_P8158LN8+N;hH_hc;||#$8iK)J^Oq=0tcco16T)ZFrFF~{ zFUEG#c5va+PH2)?=G0Iq*F~Kc19JOlFoyM1}9o+tbO6ssM zCR+)q)E2?iR;VZ3)Q(2koRD8C<0569L**O>NoWomE@e?**9BBwL^@X>1#A^(!G5+y 
z9e^G|UP0>MxdBsa54_w%jsnW=QY$PbAS>EC+HWr3>WrzOXQW}0^o6D2U&pm};=)9S zvu!l@Juu(`(psM0)f3WlNy4ZXB*YP_-tC{?d_KMYbfTfcTUFr7=Y03+M2Q@xx`%gm zz0!{*$jjTeF)5$P!R;8uZCTqoYlik}p*wYgIm`tSZACBQHgttV8+%pKTY6o zI>6-)^9}eCSuhI*tObAU_@zg;gIT?G4w3`<)3BUYl9k!+cQIeF;NwdTvaqn6d+YWZ zFX!5BvFISz?I!_$cUX2V?K`K__}uA56F@pmbIjy7`T^ucJCTDelR$9+CBmsp$?%$Z zjelDFKjqa4Q>fpVH4aKI1w(ltEiX#eQu?vS?9-M^$%N*KX*)z%pEF{p45OuX_!o!? zdX6(>XVHp7-x{4W#+kLF?_xRUNAWhT5Gvl31M0S8kmF~*+rPd1aB=Z|yLs#7gYAeW zcQomp_ymw^8%0VgOD&8hWK3L3oUmp9A2U-Q2B9Lw#6$6MO=!zZuKApmk~-%(zGWi0 zS=C6187`ccR6>5`vz4JfhR*NSm*tn{(%CMoyajyGDmnn;As*mY5Uiggog)jM{7)mk zBvDtP;sBD4mf*<1NbV74%q=6Ti9k>)woMRqt|?0cEVa2&Cd3B0&fWk1{= zpJ`N1a%DA}hk}**oM9hwt@%>m8~k;*jS=nD4mrI=ZH5K+ zx4c+`kWpvmn2HqB?YX;D8P+>l(Qw>Dmq1_*qwy`gmf!RTAk!Q3pmf4ctSSmKRy{R+ zY;9<%S2NnCy94QAqEG6W!e7Ni2#a)fADNt;!#Q+Fl<<<|sd-iWCHc=f%=(#ZRTpWx z6@y23t90f-VZJ3UGmKTFSX-HnEhhHWz;hdav4oS)9APYUF5A^sE?HG&KL>^RuC_6x z!+B$$7hjGZX!){DE&z@1I_x!=TRb;hwdiWR&^XOlm8`#In%tGb>G@nGy^eE@%<8o1 zs35P2ZPKgchj(9*&X61on)}pNCWjuNRbpbk6$uLXT3|aOss9Fut;0clyMhv-L5M z>xD$S5-Jwdq!nk(?XL%;=>`_-A}@V$eMnJgs%VS-j~0d3NL_#Ve5JaT?VEMdvCM*1 zMIy~sWFf{w#5u9`NO8+=k-H(VC9~mxKw;rjm@hau@}sh2DI?|!x`DzH5QhFcV|~y6 z>bwzgmLHMO?)~32R_q6S6xi7Aio32&wCHcT&QjYNMZM1N+D#F%pPS<~&Y~Q*~igM&6 z#gZCXk18Mgrq(nL?l`-Exlw-Rpr@+LG+qwxG>QWA$&pLX+D^le_B?S@;>>NK3>*{A zNF2GmE7N@IGoV9CFpW*I=8p0-f6O-n`ei7Fx^`q z)gZ3Xf?TXbzgIs~8Dj3^I!i|~M!@3Qg0}IMp(T(w5L2+IKW9BG`&Nc-U>ET9mifb6 z0;aRAFIwd$ib3}?wLu#Z4dJQ&TUZ?Y4Lxx31(7WsnA1Bx9JMY_ zwVeb|vz=+LEnjbMt+SQ-UnP10(+EqVz?Hk0Tr6zn9AL_$O|tf3rqhpg%_1m zMBrFOzH6AT0=@;l9qtjd1%}AZ1%iXfZHWa~;e)aIf-=I&fT_WqnWL#F*aFvzV|J~s zL2a_Kh`qvd3!_uBBaZ1aoNpbEP&4P?uOWEuMcKY(E-IiY{KXR+B+@PEw1wBbGO6!2 z_oJ(i0q@51tfde%umC|}{YGq(VVT&CFY^HFB9SsFsDZo)BPfRVo|_zYg)1maY~97W zqC%5x%x}RE6@hu9Wg7!kf;>r+0JL}|_}iII6CgT5 zgv4o!(0M1mBel1x)|40DW-hEcJ1xHO_2R|r7SZpqR!=YrH#$2g%;0i;m2ZQam(Mx* zRHaO!k9>4#^3G;=lv8Ew8dqz#*F$EF$XUeO!KaHw_q{$9&rjX zl-w0LQ^IDXhUU58-jm06AgAKH3z`G6{wW7 z<{OUu&$W`MJ+xRDqyF56^QLzPWx`YnYd=xlo9}B4hj-!ZFygPp9CQmlb41tp0s3b;Kw`4)E*2Wy`rLEhRXj5{I=f?&|C zPz)~F7-x`5W)%3V20MaaaJxn04piyP zvOPd9^q63poAmv}+uT*RZbZP)nP=u}DGOgZx5AN zCFISLWTJ$!fy)@Kc^l>n|8*0aV;90RkX8%qp&G4tsLRUE&My%P%_zYcGS#yyC>)P2 zb7RMb&lwttmYK{_9xTWd0CqYY#ogv#}1rY-7^wo@tOJ9iST+OnC=(1IPM=FE$q@mj>%4|4Jqmp11NqQdKd@H3fj9G%`CT#X@1I zvG-#u)_jh|tCT`5ra8~Rp`YdN#d|C<(IiKH+dpF~QM0|l*XbNO?auiZanOZUhZdDPe?Tv=2fVIwo zRxwbFRr6oqdw+1Ml5j{76oZl=F%X6-HILeBB(wv0CJ4nrx=Y;lHdY z4_L>69Pcp*TL++llLsYh%6WGzsvGN0IHmiMCefYfawg*r`vRKY!c_x&&JZrR=WK4K zAxBNr5j-*5(0N?MS1+5j)@68Eon>fe_5J+9Lk(V>E8@)00j%25oofh-bxYhEzj}UR zU>RGfI17H{aXcUW$$r};NlHv@QE3cZ{e|z&ES=9Elc1h_UO|Edep7x&9ZfcFc9#Q7 zE{I?Dc6=9Y4_6LP)T#48^bGUTE1Nng(AE5>RGAf;GIm@jI)?O#Ro3B+!dsn@rOKD9 z>%AMLNq;$1XA(EZb3Vzb*JZ}iZYj1fB27bt171chCpZORwQV9y0ibCkpX~J9XdBqL zMw}%p8{)ab1Kg5pW1H|0zH6`;?52iD;2Zok)(hVS+|R2&pVCb7>Fm4Ih<=MI6II;| zgAiq~ZC{$9jCuV!QwD8Qa^+Lm)m%|;?VtnoMX2=P9r9DfU`y~~ZzIgT({fb5?`?nk zc=gAg);Lg4X8BYzZ6qnEXuscsfUM)iUk@KV zTn?<^Ss@4>*Xr4B(9x;J=+D92#gc>(_t*X3fxlJiXkx2|15ngW=Q(?&q>kj8$(jP2 z1)F#4U-MeJ;=F~1IYt7g!SRHW$=U)#$yFfxOLp=Y%x4(1)^*ZgpDpLtPu6KUzeK|! 
zyu^YV{r!}P2aTZCF4I(hdif-eCk>p&M1;AM_TM@=xjv6HVJ(Bgtslr9NoHo{ZaIO9UK~^^38|$Y zW~Mx&FxRS87%r8pWp?b?ZYqWZz8>nGrB2b8kBl^Sz7xs)wh_I(iu1XcuQX4xO^2DrDvbXFlu{u^2G4Z(&>-64+Ub}uWVpc{9@GwcbRwIOC4 z7nSyKDwo!Gs7wQZ+jjyEX5Auj@R*&c0E*|fpIe!%5s<~smn6~DZr?NvtBR!Lc^OZt2*2$o~;?$BOArECeX z1i}1vUK3s{vMfNTlG(P&M~bpY>)@>%xwFMzoy=r|b?{dQfVoX??c#0(H0X;>OZ4Qg zHu%ytYyBh3CHM>iN@)h&kN&F_34JH$KcCP3)Ob4mA+q2O_E`-O{)^6eg}=kh;BIv% z+Tk~HesobC78FKCG~Yz8givxd->W&~A7>Z8A07YY{qbMliSw$Hl_E!aUkSv=`f3Q4 z{9?z-S7}*}H7hMGm0Oa)y>!e0Z5AZA8~k^;~FS9WGT zDMxfqX^FeXCQncI08M z_R;Df&A3#8)|YQr_b-=sFIQL37Z>GW&YD5GNrSH%?P@q3%iSyuKw@Ohc8>g}))giN z)3izKReQplST!>BL3m%ktu8a{yd@@E@x+p>%B44Qm47w)(m7AlTU+L>VbTrbml|8X z+ZkMyah;?o_c!ZV;Vg6)rmfZa*&bt|?a0$>&k2s}#ExriQ#yD!sh-s>VYO^=FKcln z(ZIl5Znd+gGzoP}Sevu#AQ8F~+cvRIf$zYvRFn0Lgs2{rA7)rd?x&su(Y+w)i!^FM zYS$3qTh5m6qFpDo)|##T#!{z)v&45n&9c1#b?4L)x@{(npw+o+$@qfEV9#42D+M&U zF%8P%L8Z5)x}Wy%hxboRM1=S9)73^9(eJmHpKh)`-`?!s-ySF)Clyo_n7Sl9RN5Ng zI{QdFROP(%>Z01gzXYmQ?b7B!{ct+F$y`_v838O$GESjp$xvH;X_WX;wM9z# zkf7;bV}QlT^1MZ0wXhM3DVYDtS$8HzY#KhaQ1p)(E_$G`-anAD!LkFvrDqKBNtn@A z8q)gq#I8{hJvIEi{jj=vd%FGo^yII5tG}IGy;0u``5#P0W*mZ2@|EgM<7N+*D zSM|`6;S3LDc4@0llJfwILA{>Ujz`NOkg#lAPrWYK;M6v099edU4FC}=hSidTf!~`| z0@?O4NvxWga(vf(tAP@W)IOi87s%6Y{B?WG8Gq-e@$0kgbH)o@o#9K#NY(7H;4?33 zJoKu(DClSDBB^drcxSs-KdbASUN7Q3l1oHeROum78+0TLm?=K};wAL6mm z4xF@cbC|f9_MJ%xWY6DfshPz;-mqGRK52(wdNMGP_l9^PP~L*1wPrUSu&!Fdne1)+ z%q`|i#Uhy=_@y~!9O)f8X7n;iqQ(Z|TAJYj6#j8>qw($X>gD263|D<72mjUaI)lCC zo15U2(-#%()zYrAt7Wq4$vd<e9`1Br+bE8dJHxaw~Vlb zp~;|TpS$%>Gif#4mmOGLOr6VD)?jX4Bm(#K*b>6#cg@;t);uG8lk+>Df#C^{>$@wt z^esQ7bm~CLxO!zxV4qBDIek6cdIX4PO~{iZORDcMXig1&_$%a;lBUeNqo< zkFL+vjx0dsB-ekFNrhLLtLs_<(ArPVGqxhXf-EWGJFsKow)mn=mXodwjOtI}uPU3G zS$j$&5hXix$vEArCE%CS3y)exupWyZ4iC8`x9n%T2sjVN_luY!yon99g!*$G39HRk zCAz8@3VA7?hFkiKq3gC}-RVd)41UXbTWYErDLA8*Od-Iux6Zuk{HWwPhmFqlaW*N9 ztwGG>N0DpP{V|f4Rb;lh(bR zZCLQ-`khc%Ug$eD$PzoASulM$V8KYG<`NDu2cCjrio=Tkszr0UG?xro5a`QM9x7s~ z{x6b3R96xQ%$4X}$jR(akm&L@XN`ukfvHdkN1hpXBX1I=lDj20_~UtHh&A%^XZh%Y z!l)+WuWrm%dKm*|>+oNJa(+IY!|B+(y4^dz`t)i0{{80Nhm+ra|NL)%U%&aR+%~&| z$h5&X6{(b1*;j+*mMCq%eRAhK2gVH7q-{j7cdKW0c3`qorUWPnj>F^F=L}$k#6*)H zN&j_QCMf4DBXFAA`F!}0UA=plYNO;H&+!gWSY`>)9B+NF32dCnc!Q^UpTuFAv6`hL zZvngpWbqygY6tHIo#D=$zkS7a8pD5~u4Q#ohc*2=Y*(@!uuF&3pdykd=0vRaP8UqK z=-Eutem&>5c5FMH%d0FPHvWRJ_y$V@bbJA%#{WFF^D$*FnFz=)4Oys;zLGnx5|fyg z2qr0_s|?s{Zx-rIis+RwCU)8Q5}~t{&G-wJwU;{`Hl|FcvWL_CQY97E z?PQ50!>Yg?7EEE9CXdU*(-?%9Y~r)x%M;sO?y!!Wa&L~({gsqd zRJ$r_0T#`ME-Kr&ScV;RZEcc)R@9>&(2Ragb)se!3CZKA3360uSIW`@Co4^m zP*KuU`BeFC`4w}A;V-3Cs;`|jyqr{7N4P=&x(X$YXpdF}L18cN;>>4{=rni)dMt)V zd$WVfym39TSS9v4`#&lsFXHq7VKQChm0A8OXM4WRZ;tLrp}Om z3KNsI6jAMpUtAeQ#L_=O=vD}Jt-5jcQ47sp@v9IgD)G!jMZFicMsGJ(f_I+VC1u1M0bllOoB6imD)D!j53t?dI*!@+ zE9Y1-?m2G4{cud*+l0K$2X#v$o;#ze-Od&5CGE=9Jz!!XFZ?$VUIljDnm!FWG?~q0 zlkqsVdU|dyA{ZF zbS${&b52-Ac`ayFnN>E@8X&bz1WL99)vR}M7J=pmKMm%rl ztOJ;D;o~R)PU@hfOvu~NseR)wpJ;&LdWQ;XU=ID#l7@X7Su4d+?zS<3ieo2zw>dm&*qY!S2sX z`IhgMXZ63gU;h7#hyT5OR0{NnChK^ZJHHNVe@5YuUwfYYG_tm%bfYGfDg;%$@U*|i zz4>x<@p8P?DE_MjvQ)J$bMALcN z!R!2FvI2AxOGM=ps;{gWv9g_6g+cd|s~d9}pd0a7kiE=+)omptWSx4Qc0Q)q_hhf~ z(`uW~1ChQQ%(%owKL!y27gh00c*|&y)*-xczc_rG z`i%v?&dIS3fgMOe*{wbM#)I{<*43SewkY{ZiCE-fU4GtP?bZ5mWBpUVuk$+%r4Va7 zXE7nILbS(pjb?=ciJM^57@A?2+a2I9IajsjZ)a7J#}#)mOg6Z@{hp%WA73|0yZWX9 z!p_@9Qa7iX_lZ+09h&i|h_K6=1l;g+ZwH#o@`nhIo0m%Ti}va{&Kp--1}&GMdZP6| z_LO0D{`XIc{0bB#K6=y=y%Y$@OH&mA;IC${1%%9j^w4-Yy#IEj=)YkR+2LLHY+v58 zjlE=Q`Nbg@B^~oI3rAx{aURVX2_0u1hB^Rb2-_K)?e&&>b`X|O?4jGXXh1f_G{cC^ 
zbBh<}Nc`39wf2e()%iIYRt~PYF?DJokGw)6gIuhT<uRfg}j0lyFIml*XkB5Zkp^kvck&B_0@mLlHy-8mDVTx7yN=#&w>mWW(yF5l>t7d zFY3<$(D_r%<*G~MfLDOnCy8$yl*B(=92rk_d}4v%uRCioB+t3?uhMlS{^X=OJEggP z!p8hMBMmmIQ?6H|kYC#QNGiXT6UN}XdHj6-w6}Ub*f0`d+)Vgu;u$B^j1ddIRGo0nyc;Ray z*L*T>D^d)OXKpjAB@sH|%c+`Wt-hX|Yp7IqV)`*W@;U$AiApCyt3B#gfS}E(svQ|CF&hTXkSgAiMR$H0a#$R2RTe2QgO^5wy zKPXB{T@vA7pC1GVf3@`jmtpkvbI!l&>+;6iE&q9Q{c&~s{`BJagR}p5xBbUwg?+8b zJ>fM~JbB8al2iu^sT@RAp0d1M>yk9d5S7q=S2%N?^M6^QZ9DMqYEDtVC=t%NHLg?%O0 z5|9Xst;Auw+GiMp2V+L05XI&(&qFL%I#ygEh>Z=#Zdm4A{mL|Crj#g~|f2&#GoX6@)kzB^eHk9kBB`7hw)b(hhhK4H|UYQTFEy|Q;? zcBc>U%}&+m#l37$=DFn|SL`^hfO2S4Emg7j=<3#)Q)n-CyF`8xaBIaPwn6IhQ`OoT z3*VV7jo$Z}xeB&NM;mR5^fBz@9+Jl^*xkNA-Ml*if)&bCev!$D0Nv`v$5+Pyx*vNH zs~QG4W1Qr@GCgJ#!)B&3_h}{f@4#O-rGy1z+3w=6RzC9LM+V%Y&y_VDVNWDCn5+L5 zTsGs5qOda2K(Xi~89(KSmY+Q1+q{kvH=M11LZ+6*L6bqLzt^A7uRb1Myg$_QIr`ft zwb8!alZ(=+v*g;Lb=hL6jycb4x3~pEEyD7h?#a4VCky3XQu;9S+<0FCqrzk@}Mso zMtku5cg{veS(viK4Hvh58j=X?;=!K%*nHl)W!4xD4O76|Td${vnCKJDmO|Cl!^64M zj`3F@C@2I^(M1Kjr%x0Jds{h>?)PF!OPfJqF1az$hs;n2hFx2y)w!tFzSP`cR#=b?ULK>bqY$vAIC?ww7}im}tl+QSSDVdo zga(p?M?#0BSUVc4_h?^){H|JiNNln3w~@3kG!PSCH+k0+zH6QXM)C{;**m%j?GfD9 zGDcd=NQqt=y^bN2!L_tty0-pl!Cy{W>@pm)Z9-WW3Y9wNALsV3Y!Rsm_>yStwK;ry zot6eJ^Uw^zd4|PnI{wRgh{D`rzH_Vxur7~~eb?o~Z?ArC-dw4WF?z4CzC2yX^0Y0x zx^yfTl|t?q-?HNmm3_*}?3{V$ySB_3!s4WzP(x~ewdyRa(Dtt`Kdjep&cuT05iMV< z$c0WT7oSU$jvl`pKWVVqOLm^MRdp@+s~D38qADOu)}h!!%ZZ0=^@@ z6aEX%lF!_Af5y$OT9S~K*Sfnw>f~emOB4owL3V|w8+--93;q%*cFS=!155=yGfnFK zc7H2_M3%|voBgx52P@eppV!i&dIfssdLwgg0JBBg=@r8+EfHvJNrwbkMzsS3@q#WQ zS&4IUuBOehOMMgEm#3u+D$Z6iZfHxf7#I!yVu80d0Mq%FO)`nI8?#itJ?F^}3I6_C zjc$YO@Q#jEf>F%}qOOXLc^s;d=BBtLUgG3JJHb!wT&Da$W>LXq>UMSW1De@~hvGMG zj5Gd{`9M{#@LdDuOLjDPtsBz>h6^`CPuHyOd8PA6!27fou>diF=8`x*(M@tc#cCs) z+ylNKbW5L#J9G-P-o+rTh$^<3LU>lBXvkO80mEiEKT7rB0rPgrC0SQbTXL^f7(tv-+QxIkg>R}c)j_Rf^^te(u*TitgYft9=&Q!_$+xHDmxq&=FUNAvN|s;bcan5tLdWf+ zQ;{l}=;}K8knuJ*!?o|XI4NmY;iv6+Ru7FoS@Tz`{dy$e+c;Zk$yPG4!|_7vyK+Rk z)VI>8Dy2%QMu&m0c$?>aeWH~R(M!^4I;VnVB0EyWQtY_pSAU$J{QY?S`{~_h!I@Gu z9&CnHs=xRBXNM)YF8r5I z>X-?y{i1ckUphQX>Mhr~x1KGY75a@C6@Squpl3x|E33*KGiu~n3AO}$;=HE4hxe~X z`d{e)AlMjJouactTgV_d2#Z}>^D|7DH-aa*kTqDU?h-5x5Zg@DC1D~g+hxkP^s=+usMOH{5|t<-h6 zaJ*`)aKyYS$4>ck8i=bqrDnJ|GI-tVY^s@?`?(opgRppaHQUS7^dju>Y1jH+m5U`7 zmeedGg~#H~D!*1^u8m#+F8C{8mDJnG%u2pNVf|N7sJ?SzzGNeFd|_)dQ{gXJ&~vnx zV`?pi)H^i(s?e#Dt2^V%fBxM3VEnoEs-_xsd~&^XR{>u|hXujuco{;iWAqRtFA}sn zagBY4R}{S4k%*ck_Mj3Q>NT~Y$qP@E{L0Eg`Yc3UWd;g^AdQ9te4YD57O@~z|DEvH zD^=-FK_0H=0?h;(&Kip9>tVHw}CoGtY z=N#OxPb#9is5>&F#IQV=mxBilL9J?mg}=2-lGY1WQIcD8Jl-EYe>r;5cszP}I{qg7eLVdp{B?J! 
zNk5chr|>t0ftx>7bWhzqSP7~Q{B2-$HnO2`_2<|1&+n^$emnp9`@o{_iSk&4dbtmrP0Or*Rud;9f{6m%}T zh+`^W+MeNdWh}3CoO9U;hIS@^RZ7j(!dCO*fbVd6mrjWCsSn2i#VgZT17|WG;O3<9 zpQLAFXJ;u&TSGrYLqS{p3-ARN!A;}b=Nv=AXXKm5!shmnsVrA+sk1^-5!r$bql67TWK@Dg~+dTOOyt> z#SEyC*ErkzX03Vf7c85D!sXd$53v4h%%TU& zk`V;{+F6}m%_DCLPbnzF)<9g2xC$IAo-4n%s%6zvOOGY~YFlp1%tlNf94I%ZmV>vV zpS@y{eCBm;%+GK}sT2jyn02~tE-iJj1Bmq-3xAi$qh5rjPz*@Hy_|<50o!+i6o9sx zpK|P=74P7Q^;eLllmnfIGT{>`F1ekx zK*Pm{Gi?#a}poi~_0b zTsOqfOz#MUxihjXXSD`ETqqpG1hs{{9^u+E zUxxG2HCkIO?ts_27Kv!6lvhKrnZPPgX8K9 zsUhT5eYZMsd8A`AxD2e4ko|{h8Kso<~ zaU&hO5@AzWBag*CfwFts5v@PPn)R;r=<=?s=cnwhc_4#m6}(BMqbj~^UnNmL;bUHt zJome42J%toiTaF&TPGF<`-#AietoWj{jyIu4G=dw^`X224CkhxTKEOH?Fx8 z{^Eruc2hZ2I{%VK;rNaTrU%t|Fap2etn^fAUz}Yh=N}8{M90 zzUHOv75*kmO7|cLu4r=BU!bEVC`{&_Al2v=U~NQp zKv^9~Sio1?`SWql`VeNy6h+Gw9woZHfOX?zp>uv6$gbOyZ7_gu?9AWA&eQ}JtfS=| zMwcmjcCAWYxbX5tdmB7<7<6JQElOs@M0Os})m1|d1b1-de(*E!c81;?f3_PuilRWd zxDP*E+t$z4u6q6phbpZ?s)^@S$cxpZTrS!|c(t7)Zk}u+&L=m@JG%a~)spcV+7Sho ze};@!P#b;HwvB5UH0_mNfovA4#-FTGqwy!Tw`5^eE>?~i2Y83|e{1Wd6B>Er_)=x@ z^vU=axazYp4!ONi4htq(Hw{ z+HdB$Xy(d%PRBC?v;Z`+6C<|37m4n(g@l%k9n3}6cA~L<*E9d+3@zhGYqnU1)7kJ{ zv+WsT=;40QB&@c{KKMIk;Hw?-oXNA&R)2Q2ue&sZ&W*p0z&7(WR^A?a^xX4KnKX2@ zV$FqY@4e95iZz9`#G%LZUqNuFZdD?i=k}6G-U(h0POY6n_U>t;uj3J7+y5QmVC~>} zHvWnSjfYkmhK@P~#Ca)+aDl&#1;HMtWOpcfsYfrj({?b@2*|tv%G-Mnkr) z8Am~IVXhU+xq3w3y2y7;8kM8Brr5M}KKsVkX+)!ay2T1bVqkWMI-Lh z7VlC;3(H1ZCUwgFldbUM#D=9dZ+&fq^Euw&VzuG6U13yZ!a-hK+z+PMS*uql|Kp;% zW_f3f?xO_|kFBV#Xm54N*1fsX$Kb0ue6zRG5dNy_O_pS5xm(|Yu#yX>O3qw3a%zWX ze65)$$nV6L39~l-O2q8=JxQ7EiO&{LGYtfx8e#47C9@iD^L$-!5M+eUY8!B`GY>CT zp=U5!@1ja?(gI7JuUsyqky0kkiAS1m=@aeXg%*y_hvi=oM($kNsk}ylzW`|Nzqq8W z+`$eYykon=9O*mFvx*NNzYX?hOO4?bf zV0Yte|HCo zd;R`IT{X`B_I~sGXH^^B?Vn3a>!o)FV*(=hL(s0^dg$afu8n9_1Vmb89HH3RT^LdnlL_`-Z?*xk)C_b2WEffzr9cW zqm@QwUQum@ZYj3`vGdU^CZhMDF=0Aq#g32Ian9vH|V@aTR{{4 zjyL{#XR>&kDcIXK>)82o=@yu8A5f0C^75q%honict4s!l(=E=QOwnTDT?|1ce2@WIrZ&WQUco5^@Sb_SteC{Lf%?AvM0@ zwc9uoKdNncp&g2?wB*i)@|lw^$SU}_HEa5fEeT+t6LPEdVPHgQVb;$gBu5 zpM&h+FMLxsGxP??8}LowSYKSRz(-amY|gIZB}V|O1uAo1kISQUV{jQXk(J_U?9RRX zMul}ON{NI9whZW@qUPX3I`pmK9g>(J5p5NuGE0sf0yNHj?E|dUJ&>CAlP5!|z0kVJ z`B(S5D>eA)PktT#MzjYaub(i*r;Ny8DBHHWL&>w=3TM!NrJ4rqUA}d!+-8<<==BN@ z#o!+9_ih#XT>axjC@dDN5DDF(z0j&o75r@-G8%_%N*{ z=Af$G&Kea627#?d6;E)!&b_deP}`N6N>L_SQqw{EyM)AP7hY8!d(#FEnVYxs(GIY(+GP z@bX{YAoru$rAMd9;K_{785{egDszPe{W;dPw;*rhZ+(tLd*(Ebc&E-8WS`upJ#8i{ zE`iSWuZG#>yGS%W_x>yV%~A^{yy3j=i=`czcI9x+n!Gp1w;G@@zU%Ibv<(~M1lZH*l3!wInp;)50pqsJz_ZzOSr|bDip}vz`w-Vinof8u5>Er zWr4o~g>|{QZqk*9bnDEpUC!`x^{62qb?C6Ol*z)H@mDC^=~yn{&Hl350^ij(VP%yX zQRMrD=&+U)eBQd;Nke{V8yi|C;K&*BTEaj%9$qsvE*E?$uo8>p@|xzzVBv3xwyOx; zx@||+@~#UOldDNoh+`tEw_sw7@X{(hHw^mtv9+VG6bn1uYRx{pRhpTame5k|%UCVQ zi0s3fxk^CIYx~yH24aZ4KCfAD^WXY$+_O`DfR>0?K~iIJfH*uQ zu^4a}JOT~nz!80GE#xy;wb~`_jP6lemkdMaYso1jAu5R=-4KqC9VpL--(EVQSb+1K=K&(? 
zB>J#=(PzD2w6Tbx@{UA)T@?IbrPh=;2kJL?U2B~Of(0T#Fy@N4=jui2c9a^(H8PIv zT~c{&idvo9V<)$SR$i7la^tDQzcfRl$?{s;U6zdNB$HH>YGCogtfRreyLrny6~__B z7lPb;+HT*hPyX|0|KC1qd-XvSOq0;liOLkYKD1L$72V4=_^Z#Q&#~p&qN}U`$R_>- z|0~}@10!+$Eqo4^ZfS2T@b*IV0fo^m%kk|*(s{o~H{b28eJOli9_*knvj{tlUsS&< zXXA2(@-g)hPwA!CBmDFg^73c!*GnNVPuOZ!hpRdZt24F2f{$)AsraHi*u$&b@i_Oh z693JMs3dBqSjVTWQ|VvDXKP}+rdT72)Kh(yk)7W=y+>YC`L|QkByBwO63?@`aAZo9 zOtrT0`@-_Uf?|tbOHVbVpdPaJLECJJSn&xuv$nD4u^2V0hbBRv=yORKnhLE2{%R}Y z*=;uvJwspf*`jAhn@61-$-q`}FhDiv{a!x)T*YR&q6Y&gruO z{tA2t{)$11`suam<+57099&bpL&bDzdgg8$l7a@rO8G!8W?jl0<9NI!m3>k96-y6c zva0mo>=<4L3K(SLpPXk!+vK*KIlnAnrAqdMjMZH}TYH{sr6hHzKB;w@bXr}a*XN}> zrMIj!l*Je1tzCRyb>>+9;>bEo$Mj)^`}nAJZ~5VlUDlM&OcrpD!IAh3|9j6NJ|*x` zQxRnz?pCu`Q(cq)zZiyQGOWtdAm=JMxA5OOGpL=dAlUMlXYPk@3uhT z`K1fy?(&qpquEO~$EPNZiU!uHhDK^M2YIu0LsoE@4k)m``gnQ%``N{N#ey%@DOv?e zwMHRJB06>4xg}B#?Im+AbU)0ZsG^+uPy^~Z@O0>h`sA0KtCsWtuGsEc?X!%(#iNt` z&b3C_Z~WWbK#CR<3mg1bZ&Np;0uvSe-F~{f{jj?DjF%&yFkBowu4E3>YnT_FB)tM?BB)&_FOw zhRiNL(YFTk)sFrIk&%Ux zCM!D8BCi)l92u5@UK;r8^p8BZ$?3O~%r+*E;h6OY**42Ac#UJz6Cjp+hj+z%&s65=O3yLcpKOm);W8_vB=f3~mttPBAlUboOliDh z6)}#?g`9)^b)%INZ=bgw-ccddT0F}c_+HF(7|qT}Xmxa5~stU2pS6h}VaKAq{z?u(`K zKb@ZXLcUf=?r2|7X^QqLWGpD0z;rS04$k7ECTG>F6iF4c9d1dcocMO zISY9+^JQ;T^|WT7pd1@j6khLGyc@{pI9)KieX$2&@3Keamxn*n+7f#uO1G|8uQn%bXf5 z2*$V8+6Et5IlV)TZ9xrQpdGXp`KZ#v;#^c1ZGQiB{MSD|{*S+0zWb~zqyt zX+%aR#7wH*@|r?=BQ=*Yqe|xodC?@y+padcPm4&+cC%L#*a?3@*tqYi|D-hsko*R%-fRGBLg_E+54sVP{1jnSm4 z9U;ZUe9d;tkDW7szifN{V~cu=by`_TR2!YEw!I3D^z`%!bpIJcY1_2O$eJ}-DG9ko zlk;Pl8^e2_Uu_M*tk^|faCV94akkd9RH-azGL)5flB`v1!X-Hu^3ZPnP+zQzIT6b& za$><)DM-3xQ}QTzmrzeFAAhh$@UXFEUF#Uherbk8c(I>*Yxk5ySnow!?Y9o$TdtMh zD!=k;ay!v8`0MlvY}p=eZaf1;EI=46DzHd4PJy{5dQDT-P) z51&V8GB@yS)LV-u3hNgplA5ARrn*g6zY(k4?Yy&ghuBf0Q1`94@pi z8S&I&E}_-y$ky!xFS;*uAUK{kupWCX>P0B*2v@hd+BiA6GNVKR6!67|Y?rc-G@vXh zYV(YQsO;E{Q@9xhDiAE#eeKrvkpX3e23YICiPq#-oAxfl*X_J(AJd4u(~(0L!jkM9 zEz@K;OZ~?7`7rryEl}TddgUHy=^UsxYX+Tp-Z*aS3zo5izp>n~-ftM z0RLFUi1T)&Hbr3==oTzH z)x_4e_pf6dJna~-h2urE6_pO+(^*UWSBpIYeAY0T7G5=D|bpMiaV3B;6Kv*T@;sdA{aQ^$m*!c{@r)*E)S zcp1bVe6I8J#cU^i74?F*+0Nv+l(S>x<*sU41~`W9iu$sP0M*x+E~F@jJou~2%|CP8 zL+d(w%PZ)7eyPZHjj++OA!Bpu9?Hp}vfNmk#dFE?kIjR;loeFIddHUwTZ7%CK)Bh+ zlCT|;n`+n29|u}G@bE8h-u^#-TmNyyOnx<~^e3~_q{$OCoOg<%@(C^x{c7!K zw&=(SxSIt}zFe*XCiXewyU2T)zpH(Lg|@>O&^)nyn}Oc0^HcyjM(N~5;lDbvE+m~V zJ?}g0X7VO(J9c2Ehq|?7qoUjb2zV>OuEnH z>YL4YjPrtNH}TNHX5!cY>8vFb)-6fZYe5;cxK|#jW~|7CEF}f~)!@acO|hL?16U%! zN@9!nUF(e}Lt`ML@YinPHoef>rNgn$=V)@*J`Btnh~`gVYGRJ3=F9lIgngyM6A5}b zUT3f#agwo91kswu2kTdi^TTKHD(g4O?Em$bO1&7Ez{e>cNl#2aoB5H-a;Un4>F@gDxUPB@g>IJ=if4+LV-n`##KV4t$ z-QFC2xmAwzk=A8s7Oc)%3Ir#0T;i<9()jCIq`9uJDvYKwUaW4;gG2*x!yV4 z%B@%USgQlu!`#D|TlGf?2R9pC=G(yK#AjvlRXkTaaB5kAkYC+K(cwU__CbFTe8+Fo zHba0|CWMz1H2$(i$+Ti`Wgf}}t;>-E7K@84HM9X+J^_D0QXXtQ^Z!DA=Wq3kF#lD` zgycnMMr5>YFiqRZ(-*B*NanM$CUo%Rj?;dHz{bvyJCN6NzLGAeT}lFm>1~;>psgTP zayr;_4BFr?{n%^Gl%?ViCo>q$w)|1xFHu+U_u`AtmZ5AYCm|<2tovL!fl7H(Nu7fK z#^(bKRuAh;&`K|WrP*A8>TaooSPbdlq~76B=!pgh--qGL$J z{DS=a7yQ*F!Uzm}Wr&`9RXux&K(Sp{cDL5e75SWe?WPi^Vwe zN{bz^@PW5fi&&X^#F7s$&dLboCF^IX8!nwp=c|On=FIAf^Ct2&7>9LK{Lkn~9j~2h zt&a3r5WLdZe!9H)d_^u66_;ojv^{!&JS#G?R7RvWmD*;|oY|}NHlIz77ON4RSKDWG zex9p<$%RH9k@sf7;P2A#S*HyP_CgGL8qr#zrGf*sSpRgf{ryD9d-!kfPXE4l`Cf4! 
z<0+%(q@`w_{Ebj~=9zoywh;)9Z-1JNo-l%1{3Im{uO%8?UDv{=efafhjC2{bTDE4c z*{Ge6BWJzJP%J5EL(8+VuNB;sC#r0-w&-lj^%Y;=GoR1D7df(FuN9i&e|E}>5jKne zrVF%P%KKG$RLBc+90lebONEMpj{s{UCY;*st?09n1_-MBayRLQVEbMI!HNivrH5%# z=`gfL`NM;@^vDtq)ehGU;T7kVzV7l)g+B`@*66()LdBA5MoOdV@yA07MBO0y4?gXM z%Y-$m8OZwjr`nPze^?%sz4gQ1h1U5kSm!#}B=c9TXmg$skNMNwmq6{1ZV zNjH|?xZG!*GryV}03~&k=GuJsU&N+;=ZduEaZP6fom=DSDG}W+vfr{o*gM<6`c9LA<-?*;q5C0u&#F78)@QD@9Ph2xY5n&tChwHDvZQnwPrBMQ6nKe(8b9>oU(h+2j_wNegl3`bBsE zgj}lESVpRS1B*sR&zZtp@YLXF!mB#9g#s^JSf7KZyWcVj3x72<@%*pO;OYzMS`2@^ zkT)wX&HMVIzAgk;bXXwnsC2R5%x8ir?XkgiGbBP!Al6H~?y+E-x4O8V=gfAf4(*gL zx(uG_%}%U=_s-c$euKXt?~K1fKq9$$;|s&(z4Ov_JT4u+m=A|}HiEgqUr@NW7L%zL zM@x;dSg@NM7^hSTH!6b5ap1gSdbm?PtkPNA`vLhv}h)4;ck4_U~l!y0_;A69Me z7+MEVs<(ZV_`g4W_^fPa z#cFGUTAt55z1Erkordo-FBMGs1fR1BJ}f&~qi8%4 zYUS0y7g{6PZ7bKbzNIddS{-<+K=223#8OjAEfDO48Bm_7B7?*u9rHk8192^R&%*Z* zVKsL4YDXpS>3!#=i`ItSjF+d477Jc{UnOhRqoa*^sg$zkcknk>il2<;>chShuC{-u zx7EjkfBR$g_ru!{o2UJIh5z(Cb=x|)AJh&#ES%zL^WH1=>PaGnqv@eHr3b}KMzTuu zI={)MpX&kXbf)^c1!wbroz zs~7s(D&F}PnF-!1HmV7!dJPRASV1%m!)TV`owf$PgfIr$vg0^!MPRd@0zEhIB;(n3 z7%rLTIV_>uUY#)X*12BAg6EwuVqvbPak1U<+oZ%;u4(m)R`V`xPp&lpT&&On->l6v zFjNvDnlERR3Pt6Hn&Y`rGFA=@YDUO>3-aPu)K-Hxs~04cl$T~qE#C6(J9TdXV3d3k za8_odcJOtmtE3W#Y9#YB{~9>Uj(bt!L#7+X>`$IOMdV=ciJ0;jNz?K&!E)IC>R}#8 zH=ud!MD%=~!r!clA{ccHH)TQKZ$a?(%Rt`hbG7v%;0yurKd)XyU_)aa0>%-&6VEL) zPp_Wr1KF@?ZQbxH{P)M^_UFa@PlcP+Yw`H%{`2bU-Ld#DIYQtdGK&@k%Z5JmPTnkU zTz}d`pFTl1)_>c9W?9Y*DU^%`XiZ)iqysolp=bJIykd)9hj%o08ta_KYdvzJbWEtF zcy|`ESMv(uH4URbtcQ=_LH39sbN0ytYEEzZ~mc`wWF()-Pw7^!PkG{+3sab#X)BIZ2#o)6(x zWwcM{%+(7E>gu_QV&Ax+z5c?Ido$*w{>z+^c2#dEL9m-rsyp=FB^|M}G_dloaFUD1 znaZ8Y1FUtCyc(~YTp5ndh}#h@uaQcPjK6rHwNu8q9LOq#=mEA$10>PH2kVR7l=3_O zl|In{_|Vc~&JUa$mPVC#RI5mA-|Zj$=bPidzEj%smrpmEfiFkTUk}}Xa^Np|7EIxf;t-++R4AdBaKg+$jgkscPF>Xr-H_6!f3Y0{`6yMe~9Hm$+;Yu`6e877W6bask&8h?$srC|cT=$WFi ztlnE|`_3!*E>#}FXnnnqI>#4=CZeiZ;B?P$pB$KPORJghKx7G^u$XPkV<$>!konAh zz&FbqfxpVFa+67%#(1_(kb!2nbArE1`n2tYEX*|4JR-2GtqFCOOd2~ngFWFQziL*g zo*QRgJ>hom?B?@{5}s88EX!&r`A9)HFE&-ZWy@?yM|&C>@h=Uj2-WfFFeLSExbvEV5NiiJP!AM!g8tyECr)FVQjnCv3a5^AlZTKTuR zbEQ>(JHG63{c@ySYY%h?x&9m#k*~>n69W>UN9l(?6DV3AL36 zTSw~5eI9$ygufb^@Io17+Ni^Z>U=kE4p08$?f(Dw+xqYOcOOStI_hF(?Jd*z7sTbQ3)%cfBeAA+7?zr^O+^``vY%Cb03 z7>KziP!0;~TasU^?zSg&Z)Is0&MvcM7y~*dNP(ne31bI(H0IyPeHLoR^NgJ)6GcO> z*l&4XYjAk^0}Xjg41%fZ6$A^2LFD40#4N%t$!&n))?$KTyD;Cv+|)6-q$uf6Iu+lt zQ(6^I#CCW5-b?gZ2kU`zKLdZ`z`>L39M^%uX6V6F-ZjPp?`zJ%kS0m6&(*|e%SK-0 zYkAJ~c`S47hX|MgbAD~(si;n{DriDS6<>(9Z>{1LOvFG3+G?Q|4b@IQu3V)t!-70Ee4bTVY@1s8;*cpM&jY#}$8Y zWWF-*k9e_m$krfB3hyzzn6-DEMy0;c-OWQJmkw8A!8(j{j4uUurizZY$o0?pGJz0B1(&3E7 z9IH+$vCh?%eiy41KOe7rd~mH~Ih>bPA@D5jAHO$8&S*IBSIPv&ij`PhK4KMc2IZvb zNxEf+s(q>a4Fc*1+JuOuiiT;+!8XaxZ=dlu_9xc@&DpkSB<~{5Kw_QM>c5_z`)st& z+4c<2ymWYJT1&BwZKrrGwpL+1Go*6uoY`I@RbAu&jNg)-S&{n8dTyESTI0|j5g?06TG#oKShHq!2Z!?Ml4+HY?;E#1p|teSK` z#_-BIPmb@-X3N=TOa~L&_*`oJYVxyWSgutwD*hbgrMyOwO{8X?=iEulAeJ!xYWBT- zIrR8?c&Qt(8>($!K3CH-jU-$db_?cGWHcDA@T-HZ7kHuJn}RS6kW{{i98)o3hl#T; z11+7&aBdW?>@2!K>x=w%qQHcMQ_|ZB42{3`SCe_A8Sb>_6e7)?G#Ve>5v$&N$Zzuk zON!o|{~aq8~Lw9Sm+Hy@4#!EP@pmeIZlKS{ix$<77KKnmVnY34{V5`?|lh&Dxb zile2dzj`dYbJW+ft%h&I%kJ3piXq*IFv+iUuLp?6TRzvjF8u9`Yv(@?=|)kHZFdVv z;qCc49Ys>t9Diotxg{NFC_iT=xqdZhh;uuNh+pTr=F0NQZ-kbDziqt-Uj$Nur&u*S zRqPo~$WyIfH}S+B$wKAE&(Bi=ZCTi>qA=5Ho~j4bsCZmwDw-XSV2u_9EBP{0Zm*Q% zu*km33Dp``-Y}2Kw2b{c+1}_b>~oNJ#@~4+j2EGufxofP7W}RAv|>T+Si{si0+)H7 z^bvB6Qbo;F{8%k9&4UyIKK+&_wP0C4u!wZk4MVZcEYy2(1gfA;eo$~ zU>E0BbrK|3`0G^0sUnGeywi=mKeF>Pi!!(&EvT)tu_BUXRvN}3(Hku$sPyL5?}z(; zef$1@{(b*1?>29aXN!v`wlfC`Rc-P1NapZ_(MTnaZr0}|v&l2BnK?;rOY{QkCDBt) 
z5AczQOu?Vb&SYSEGGh~BN#@K3O)uV^;_l+V%Nx|M>j^JWT5wj@?;>9+YC!PEHf9?V zO6D{2PCgZD4+&cXg3i2ReuBJoZX_4Fxo{GdlZ{NwmnblN8RV_Hq|^oXca>|c4hiQv z`0iwVd%WuJ8j&0Ny@0hU&^?YGv?jHL$QGar==`qXeUZJp8L4E_gLRP ztZrD(NV-T@!S8{!UtzCgN+VyI{2H9WCP9!lF=+dRiVspF3CiQqO%(N&+;OT%lOsEG z1h#rirPGvSNJOSoPQmg0BtGiF<$^hZL4Eslqowra?$fP1O+tI5np>|R=d$N4h zF_W#UPe>#lTbNWKQ6ju$565#wm#T4HeZ4KZY3+m92Gk-APcoO*Rs7TJXLF^D=A_Is zfpzdby{+KO$&s#KjZ>F$dj z$LJ?BCG1GgKoGJ`Y&zUBa?Z#X#7xG9ls#z$81^x5>*Sq(6*?^QMwwTR z;9##B4rE~D$@gs+#!0~28lP#{anx&n-lmWyZb{uQ+!7LP6GH2VXW zUFZ?bD8u=6C#7~;Q}vU~y%k2Iufo&I@^{6P%7@j0WA*;x?Coar;b!~sR^`Cg2f|l;BPUfOwZT_!N{?N* zaP?_*{>T2_zrT6=|Nf`&_xj^XGlz_VlCSVzG;>)><(=?cnxALMPWfBPhoA?mt5`*2 zxm6r+VaLCsFoKL_ShmyQ!}GxxM&WOXKu|Tgl$p{BhzL&!+A`t)4`c7yq}P#TY5ubP zv}3mWWqM}Tx_7EtOc%Sli&b4(P!vg!1R*k!TD++RkVpu!dUk)~o^u}exbXt0u{8_3 zcJs|Eab~8b1l9Qm(*X5?|(RQ6=;{J>X}NZh2^yZNA?suIl$S}>vz0y`7G8KuBsX? z<)PrO9L}>Ryu8y#(r@QVxDx(q>zQ<6dGw)Nb4tcQC-ZwrN6kq|jEQWGTCAtMTa%JR z4|qFER(s)#f6E8L{NLn4BYjxoL)#DhE##_EYPG;ImA4%H9cO7r z)hBep``wc?yrX`8CA83#6;p^dMbo&#xL00q1jltcddBvE{slS@}{$w9C&ED|F0c(T<%w_G%izOTW>>(aHLbv16U5iY5ntO@~dH-I@XUJ2vLl@;ahX z=y1byz&d{hf43dPY<}veZ9NcPHtC0cqbTR_Bb`X80lU-+9cKSnG&PCElUeHS?CIs znQ!p7-A%wKs$lF3Qr~H^h}34J9tTTJNq3o+?p~;{#{TC|AO8F2U;Om1grk2WhV^SNiEn!QXE#d5#uwcg zS<9RtEKkOsho23qvNi`I_xM?Ig!H{^0k&*r2641}ps9vC+VDzl)o~`{tUN!rRtm1lSDvX9fs&h7Uf<_pyOFqZK|8t8;A;o6 zb_DuP&04+s%h_86aO13AB=oPzrX+L(bHPV4sRXz{`Pj~Np&NgL#f93bBQ0p|I#+s< zzP7x)hDp$t|MoGsqxG7QXxSB}ITe-mI!X>n z$1U&eAp5;jR%*o4nFj${==9ScM4>044 zG!lwVJJT!JY)~G{<<{x(qkX%epRP0=c;rHplOw2w|B`Se{GGjIc5R8bGUtfUa7UoZ z6t^Rb=9!67Rv7A5EM$A8mJYJ4t(6RO!*|Z?ZhCem@O|*{AHMq8@8yD4Y6tJ7x8@W3 z^_lnDsmSsTHvZzRt@w_x38}1+-=h?vP%o`v^fae?mKv@c$D^(8p!`h|A>8?1Hf39| zsQXq%R!Mh83pp6?kK6edjVpPd)Cn^DYCL0A*0{3Q0%Hus(e?FBbp?SGerCO~Vb4*muj>d1F8p98^)Ajh#5-Xh9+sfR<8Az{bYf~)1%){aJngNImz|&!8&8$_EbI=C z&bjv|FJ7){-QjVT4#pkb5WLjiXu_K+l*DtxuUVr(K4j<8q>?-tSwx4)ykcL>S2wC{ zoTM#FXN2sp-|Z5C#Q+|Zz%LgzK66D^&wfmY-gaFczh zmsX^P45tJ2I=rUC@fCv(9Eon}b%6uWnHQ{cEu{0k4jkdNL)e9JteR{*M~a1~6o_sV z9&lVZ%fV$`r><MCg9oF|qLwCsb>lBm)1GG%9ew@I^l<#NB8CTY+K<_V zPW9w5u<-k>^S)#z@2dvySet|XLEeMElb@tGROUKK%!heDYYf=8hN1^#nlyi!BZJ-B zL3yfXg-2sUqP0+l(!=MQtCJ14vkg_`s9b2+3alUB+TRSRpNbCRN6%7$j9 zO!d5~9`(vnOEF)~?zc77HlB$vIqJc| z@pZJL5v7irdte<~gUIh?4s%RJY{_DpGyDUo-T&B2{&u>HEu2Iu+f(t}g%IQKQV;7h z_4r(DwjRAD3mUXFT4p5=h(}{!Ie@3!4kRj{Mpjys=poCNV*JmF3n!4FKiLUH-Vpy) zew02voL5XU1*0fM<)|y~!d=Le#*KV)aH=M=Ep04|Ah0$B)0!9Ki*6#U^yVo)LHp*& znXjabtO3p1N%Ogt?cCKoOx0sa(%W2Zys7LT@LJadf3;H(93O%PkB3B?Mq|g~qcIa& zT54qC#a8vg%8*=Sfo)4)sjs2Ld?}qm)+ndC+nV^&^9B(gf)!v-nygt*q4CP_gyrCb?Z#9iqK6AHBcTybLq2%^~hcEkem}Oi? 
zdU1!FSEm;*j$Bdg^4b3F_L0d;RTQ)wRiwV~6m^F*#izu)gIaeltMBoPu*1uyxUr%E zk_g&X{iwCo6YMqWV^BDr@oMPQd}L55XcD(7%N13?w7FjJ%V~QMYP(X^bP%=W4A+ZZ9FtU=o#9RX8eW4bjqKHXnvGxHTms= z&nEauMJ6K}3#9&=nZKYm8m#1kEH$Jc;?v{5jL*><#C^tAHmW31;?q_T6gX-&zTY9y z64m}Lwr#LU_@$({tnQ48SY<|h%bTZX$6xJk{qgaq|NJ|Z zcm5Aybo?Rh6(J*i`I_{SzH+{76Of)i0&bSg>_Iqse(A|EGPC0YI1A%vsB3m?H}*vY zh~P-$f)j!IzxMOUP8ip$--80ah<5rqw&7#dS8Qu7*>B=yda2a+R!l*6)V&Ic%6=}I zKTvpQ+ zx2p@c&Tt;tT*Z8o1&s%r_%FPkXONRq)7)-%_O7>fUhb++@)7fK?haOJfzg`xQF^s9 zg2EOLy-5B2(dwmAm@*HgvVX%yL0(U^@q)abmeStfT4UGPcW~miudC@8X61OUlg`cQ zVXudBN(&%xO#^2^XLv!zgZ>K;vorqG9miwO61tAUq^}ef59`{4eA^+O0^*%D?##?b zF5WsV>aOg0R9`s+WOxYaH04d7J`TFdJk)!kX_*self!FB*$X}J*PRwNq?Z~>Oi~~C zvw?&Zm{N7j1-ldpl4K3igdFve<3u|U0;yBCy!7^yq?{nR1)v8C9@>lbxG z_ePmgz;w1o=ct8s%e-`QS#+3ZI-U-Xo1(@Zrl6i*;H&qN!W?}L3Jm>>#+zem!%k4e z16AQTepL8-v3aT1(K=6oL<>>Lwi&J_?NaauEUN8%^Yrxe>z${+`RenJeybwSe>v5pV?$UuLtMYqk%dNEdjQHx>6|xT zD!J%vg`xU|K~lrM=F`E2z14Wrb_^u%G*^=Uzs{X9kfqP%c~J3n)c z&$`+OQdgXM+-2-^9glmbmsH-!!npH6U5!LdU(=><5KY5xc%BC<4D+?6N4wfPR>ONd zydMH$L7RZGW&{USK~H|K`0xI~&HllQ{p06*N@>olqOjMpJ=zt_pWaUBJTR0K_2eB> zb{-d;7tAeNk*j6K(>&+5aJd%>QA&SVN0Apjvu~QG!XB8%0M^vJo#!mz(UQ`TK(#fy z3?arp_>11?e64dUC6;DOMTjvDC5!^l@ZxE~1*+{O&ofC59>Z$Qm9Q#}yttIHUDvnf zzi`!JtX{a&>CM7r)&mvHjXAmC>|)|}jcEMRY#WQslufP`Af^f>KIDq+x)v75PMs?l zE5G1>7V@U~uwvrjykh8-ch=4tL(g663Yo_wPqnbXlc1&WQ}6EP$^MJYqvmvuhk}^Q z6 zfP+&qX59L-26J7fqJS9tDz^8GwmchphRm*homdpLHt{H|lyr!Agd{TH0{!qtv!(WB zpV(~R9YHH?H741weyF)frniNysTT)=IC%(bhtZb&HoJE6~KkbYD5`u z1Hl84bsc0%nMG+z@l}mMd$njS4IyaiLB{wjqMapdGG8e2CuJT|A>*O7Emi4v+ zg4YSG*UFE033$SaLAQ&e9{b(wtWy3c-ljaou)t98mp4M^@Vg#K%9*%xcBJ>Jf-bKU z^(DS=d9`_d{^Uf9BB`l5wD9**%fhNG!~P4kq+n(Fp>iWD^Vuae?v7rosoG=;PHM+B zB&9l}3Zyh_sJ9KTsvTK4>HCz=(pD8L-)QEM%x_!cnHMiKQFX-Y$9rerJlp-#mtX(e zua3Wd^wulOdo1a5r_IVL22w28@=cZJ&}dcKwPNM)M{gP7d?`k_UD<}?1*eb;j^TK^ zi+!C1yS&>vC+5q_I|)52`b_h%S-)mxD>tUU_g^f5>6r3Gmvv#oa^iYxc2s!Ztx9Ta zRSepbqt!Ami7htU)xn9H@YhuwQ*Fa;-ic#2T}|moCfgcnm$enx$J4vE4OR~9v+YU=I`cuanj&o&jxvEX>zr1zJ$m|(^2%Qca&mge= zU*oS%(eZly3MbiNwZsUJATC>9f11ChD@WXF)>4N8FRg0a?15ocBi;-=ND>FNMqclC zGYU&nDKCQmY6}DttJR+zsRO{r1%g4$xz1Iip4?3|{85reHJxsdJ#t+R&G%6wkBhP) z`qyYMT=0>(OH|2lQOc~m5bv=wpGmu7{%_3!PNG+3RVi}Rvm0hh0RMSc+hF~OWNRy2 zC}C7()Th!02Y1g-)-s1WgE1bXHS>GGfk1c|5yxf*|sq}3;mv{v877dKwPAlPqhqj8F`XejE> zel~bVNJq&+;i<07cL9r$5u{_5l#-yr>$sz*D14nu>^R!KwRGLr$|U|P@Hfdh*J!_r znBzlBt>!|>xmP>1LBIKc!W8QYbc=SzkL;8!kqm9U7Z0FqKbyZAUrMe^D1)}`<;yIR zohz@4^kjAHb&*cbHqXwtFD|qQor(s_D;SOU{NOH20p2-;bg0slT1@Rt7pQy-3bR(P z{+k1`@=In_d+A=~XGS5ESMA7a9to!XA(9VFao3Xv8@r!;^Wfv(KK{+8f?XB2k>Apu zpE=q>c8b2QiBH}x-u}kiSI_sVk-uFw$&@|L7^8nwo8pZYe#ej;v zx?qLVz}#TO@k?WUT_2vP-5x53N)F_jA0&baKLvb2_QKg*U*nb^KfFyngl4#5ps7fS zm%F!er!3_@Jd9ISvY31F2I-#{+d|$uHSP33^=N;elk!VTPZq`$@Ze1p<* z!OUD>7VzQam=p{Ow>~M27e#HYE)AfYj);|ZiW5jwiITugViMq9g&b}!Y2*{;?{?+57t0xCip86I5+(|R5hMl18)FO>x9f4EIr`mkM9qZVr2ikT23rHRg6PJrCo?{8NY?W zw5fXW&GapA$A1~m11rTO`?ZwZhZb5*$umL1a_21%;)}uC|qk|R{$ z-Cpk({$}#gYawRzNxW+NZ|?V16xxd--5>t&^1ures=;>gGh@lWIFcrL0c+OpY5`vr zG|=-Xt@k2zHujId+1mN@w~v4Sxr#iWKiQGO*T9oLjCq#g73rTLZvH3}J~sZoR0%H{ zGX&=z>naKq9d$llkGis;xTcum-=yR_lOX>dzU>Bpng!6WhKEbFU6}}0(#6s|-;O#Rt?GU%E=NhVo>kzaz0B@ZL}kS2G8B03Okt;xblitc9c?Z$t}RffI7w$S!o zR=b;!S7MlD%j|1%YyK+=jHkEp*M%>1O(?K-i1v0ilxtPZ^K(Ji_8UJ>p$sPZdU8{- z+EdT!Enci87|uo&=v0~R{7iYIS(OQAHau6wf4(5`Y3bH8#&rM-jOnGs7YyDUtn19fasCZ*og;A?{<0;ugkdA)*y z_##j=q{-de*9T9LxEX)>#jtLVcdVUYF7Gl+eI1#JW75M`s?j8c3;Hu{AccPS=<0Zx zcJQ-dCk$B*l;>Z28gH_-66$(y&C}BVDeY3YkMC=3RlBe%@K=+0GnFaXDN|H2_Q1un zvtw0y+&ob`-#WS2R^p9I3(D^&XanWjm^c2u+I!=6V1F|BTiZl|!C9>Oss(iXLO{$f 
z!e3oM&+mk*sxY~E^ws0-Pd`(;+}OF@K6t)kL$0j8NSyjX4^XTbFU^F%tz&tw4{z%& z*5)r*%J`;bZElOz-iVR0xRU0xS3l2VV7hya3gsxqh?RKONS>@&uD}!a)jV1fq80QP ziy!uF%U9w!n?7>+q-7dK<|ibd>`kyUwFpzRdlt#ea( zqjI6A{l1#1bmepfmmA-msm1K14wnx zYM{>Kt^3Fse={C3%b?vOOU=46!eh4en!-lmlJqg{Q9I07j7}c)B*#}{huOZO#@dmB zl4!#Pv=v`~9biL}c_j@d6et$v#M;XO---XeT-~Vw#9(D1buy?up-)K>llK_3onyP? zT67(%i`T33H^-N0CZ6+=m1jjI$*|*Q#AHhvWGj-J+E%dLc`G_BQVjo9q?m>iTlXRt znr->0xsKplFS0Km$j#Np>E+WCMJptmk_F?1iV`2W=Cc|Wk%mN|)=5O=J)RuSAWlc> zYi?=7P(?N+r#OVQQ!UY`T$Y4qBot^TdWzuA$j zng4o4B#`&+@U5B>v672bbQn^sE%9P`((H>CPuDA}+}7sR^`q6<*Zap`ZXA8_RPF4W zZMd5>@M5>bu29t=r6+X{%#70e>f6vID2yIh3(2KUV7$JgK8$zbGMXibuZa$~$!);O zOs=&r&S=k&Uc+C1amJW~DN+KAvW#i3W z{Maz-Y{6w&mgRB*f>a$gy4g1hSehsxjTgQdh z+Mx?U6J^u6RzBeu+BO7BW?5(4**k*;yAW>bSZYG98u;5xdGb`J)=*r>hR6v_o#CAF z>@JeyWk63}stJV?_SNj;OW1V#pM5-NTan*nFZX|)qb1UsOe)WP07HrDHj=u!9rKJx z1H&)PmgsZB&^}D3)LB7!In}Mq51uQx|Ll>b{W?$HL!E4*An&kwG$fGOA$LaM**_f5 z!;HV!Jm8=J5hakVJX8{4E@&ACi*;p-bG{dtBHyy;ZOgKE3OnK%=I*2?>9gh&J6}bB zW#mn`AI1kZA-@`~*>Rp#$@LWDW^C&&8vFx0H2(Fpx=3k&z%J+ZwmQd$WvtF)53Ja`w`ZS7MpSFQW=R6P*P#2|_*6=qE=BMHqXq3BU%q1!9 zmp3h@Z>y;#i?}LQ@$z=`51LaSF%D-};nt0$Tl710SX*oZaks#Pb<6m6Rxgc%?D)Sj z@_QPHejNQ@TDm5=EKgFJIh-BM!=*UBo}>_}ctO6Sv4j8KZaqJHbgo)hDvIS0u-7Hj z8myjox!fHTp4LrfQDCJ*8)3T{f|Hk(<&B&vJ@8jSWHmv#r;Bb}U4D0Z_VvN)%iW_t zZ>_%Ex%}=xc^75L6vhQ-JF=z^Coh3C-WZMA@kex&7Ua!0N*o-p_ClGxR;CW~0Fa)H z5fxF2m#M5ZIH{w$_@B!QozJ0fy0jT9x$hL6VN~lDzESSTNE`R1&-E`J!#nS^+-qIF zmb*(cAZYU34qK%OqlUe8NO`BHZ^w6J3+8G&XfN3X0zsoBapXjh^$b!G!b*`5FAk`M zlagCiSe&}hT&;O`&_BLqtO5DE4PR8?{mmuQOr}p@_3c-&fz-xCxTh zIFH0hWXf!t27?Zmul9j`ji`icHMAt}x&AVibb3&=Xe?8Qw3Rc*ft9K!pJ>SA$amU^Nm`Bnc`=X)u@-qW{{=M zB3Bm6C%%G`$H|yy6p6Fg-5vP?e?iOw>)kt7cVsc&_(gRLZGf-xx3i9wO9}Bj%jY(1 z8JDFW7ELl~73sZ%u9o;Ohm~$eCy*%`BLT}Co;7d#;ukUov~q;#05hkQPNT+bqr2nt zm#YiSHPaS?kM+oC7x39i%$PJ$Rk@^@`DWkFiZs0-wvF`L*wHgY4OagQ{N*n6iB6AX zIiI;rW$a`}g4AN-+LYDv42uhNAb(~}ZGyR#b*#kWQ}hqVpfMDbsUVkaY=emIA{7*X z3ls(yhA!<<0cmojZo6cCWox-vdvi4jdqf^xu|&6Zqyc@>#%LkV@?&^b>Fwpl#p>bl z=_3{5s9IQFvCbsDG#I&RV=fXwVNEy^&MrJRD9nr`xUjG|I2P^t^?rbN zV0R#Lh5=8wb_~NKuhUaA_>NWje$RE*DE(%V4(^ zs{eYpc8K3=b@9bzqx#_$Xs+3jv+k-961#dHEiLAiE*1Rs7wlt@P)SqelSmcdUd}e@ z%E`C@l*f11P9IrQeL0{hbJt;@3S9IXstj&h8gE)&P+3N<8<`yG(tDhRhKw2%3Y(uc z|IMF9-uD}MP|a%1c{v6|p79r_F+5jETnQyBpsXcSSidZ5shkRn-@s&R4l;!j+3{i#1%}OmzjJl>(Ayg^M;WMc+rFWs)zRhmM^|qT zFEw*kjmSbX(DHj^SQhK?1I&{C-hOE<_+a60e4cRL^i0R93L45?CZy0Y!zYlL%R*t} z@9}#r*{vG1?*E{khJ=Qq2Dw}@HQwSs*KBm<7|8IG1xO2P>VP0$;uD;#g z|MKa<7f+5KY@R*bQ3`Z@!Hh#jdyjc18Nzdsnv%x^7mD%KN95ai+hf}ptV6= zVQk0nkO2Ux%yz+F?yJiN0v5j|`ro&6)+?6J68kHgA!DpZ+VX^)TY8|R4=*>{l2Z*K zGQsGL`i{lQb4>M^r@YB|u7>T$kywnQs+GRJ1Wm&gk95V3$%9h4dtlnz(B6i{0lsrq zGpMb5#aW%K?0Bv5T^Ik24;d5&c^P;ivw9LHAUhDORUXvZuAqlY5;bVY2hwG=?~AwC zT257Jrb`oOhg*+R*eO(0glUCPMp>%?@9GI98 zsk+0Jsk!lXrSP|lN{LxqZysMfIZ(k#JDX5Xs%v+s8>ck-m!p`j6?v>X|?XAhq_;p&qId!Ii%d$=XJlfR59$eb)CP6tQ`!bY{RYa5|j0WcT1Skcul?*-$+{-fqn5|w6}Q99;yiw z4Q-w{tU@2>+#N<-bE`9_SQ+MOLO?wg|J7Y4{6%t^ACU-DuFnWWtJG*jd%Uk7;tN%t zswAk!D!zhdokrn`VrTwTvik~=@t;rG-K*}^5?4n~eopnWdMSlfJ`t6FR)EIhc*VcFJvE zJNYE3BXPSq$OWjWtL1I0@&c*q6M|r6#Z!IgE&Ldz`KJ-mW74B&oNGlCevNG)2!q+_ zAw#TuB&0)IabB&(aPmaupijYH%>)rOc1o&)Zq|7Cu0b?yf#9*lhbB>#$m=ud^OeSk zeJ&EJIvbadca9%C+4|F04}bZ`!!IA-Y#gAy)!PvSKR+J+^7GfofA>2wp69o&uann@ zd+GsVM>#gpQs$V^h{gCKmDL~Hto3}sqW`Rqo}FbB(HrI2NY5~$(6o$};_P*W#ey0a z*QY^sRGvvBXx9AwM(+8!!J5@S8gf{MR4N!2;Ifm7(9gGS+)}8yK6iP2U2ww*S0Sp} z0eQ8^k&AWoI!A$TgYO6#kDbQfPCwqIK(x0yhW~Qk-g;~ceBH9{A;jOQxc%UH$FJS2jhs0$yX5uz<$t$WyaM6cpq;Tti2a3+`K z*<~~`*lgDsOr>5Df9zqFl!MkSmI$J0g4EC=tFkn~ZV}ReqxdEsU`f!xUvPOGw(K;3 
z0;AfNhp^T_iR1U^hpfA;)wk8`PsX<_Ol&BBp$j_6s%pblwzD39%kn$X=gN6bG?_}0 zm9-WB^IwkdemHu;Tw@VobKz9e(pgT8KGE}39NKhAt7j*tkN2*g9ZRV?=ehG3 z)A>f#bh0fOHF}r?X85nZSHLA%PstG~j7XjjpO~}yYsRZt*c}}289^0i!$=@*v!|_u%}6;V8Qr} z^=G8a(u8gt5g&;UzR)^y1njb735s1RNeR_*CT&Ln3=J**^jPs4!yMd1Nc+3?3H!=BaCq|LOT$>}${T2vu%_ zTLwATtAfgF$KQK zsb;L=->z+!i}6UVRS1_g8oD}`^Qg7lu;&qX-Dw>^g@)TKth%xZ^aoYfzD_ii09Wbs z&!64hZQsA#d8NioqFGYX;oavoirk-lwaTLRVR)R#`C(>RW!eN-Gk!jZzt%q?0%#sX zZQxHkvQ!Zqov1(kuVltbk6D!ke`|h0X7CAoMK{WJ?clF2KBH=3@q5VItl1C@ZdTwm z-eY{owRIZvK--1wg3H8nRYt>HHP0}voE@{~uF-L)9Z=PaEF9g_3S>=n3H3VKs|Nq& zzXo@8j;a7XS)DxE6$_UAIk2Ecel$6fTYVefru-5O!2)0RB*)N8U>~TV-~Dy?jy)HK zyGY#dZ)n@#xXk5|05)EiSrvst(oxL#DFz`Ayu5s(#!7sG{0^QN6%>@mgIcWC@kG_l zP%A_Zp01D81!Z`=!K8+T(2rg9DgokwzkbbwbK$AIiyFP|2RAGhSuZ2|nvq{t3MLRS zzSfZL!}i#a3qh&_4xI7J8VfG2YWd~c4LeG&Icu=`*11c0E}{#u`l3k)4gpP;K@Tk@8?ep(;PaZrw{PUAv|J_eN`{!RP)kg!P-(syF(p~;#j*PT-I^LU% zWJ!kx5}3t17Ov&%$aWRJgJSg^!QW0HKpAF8EY@1}_B3+(+wqNzfQ%N79N&x1^d}2a zuNTx2V*Syf5GzPq=V)aB2gwi(+{? zQoF2|5^elKFX1ZpaLvKm(hqtstzf7bOv=dW8NM#VQu0!Ax>zQLVAeo(2Xs_80saXF0}i=Ee$AN4zpU%Z7N!&{m1)A3wS0s;ZaxxzE7t%9@szMfAN zx|+E{7H11}*6__t;hyB{DMd68ytm7_s{Y{?MjRk^1xt0W=*G#T{mZAvGH&sL4o|*J zQ~Dp9x$<(uTV;6L$&gihz+e2U+Ipwf&a1i;SwR3w=(@p32i&$bJgBs!C>IiSW-tDV z@If=a{?pG;96UKrHMcs>=YhWgz8OKR4eA*jME3K@04S|8X`V)Z^9x$Zb+n(Je^wXT z=b5Dv)daimy=k5y*b44zu?bg=&ZG(MuB|W9{s;YwYf2ET_XNRGp^3G5ikzBeyejg0 zbff8V0^)(f@R7A`?5G)kOVeUkN8-Q{qK9nxI=qo=8=DjHT#ldCuFypBjbE%}g)YFz_9>ZJPI|Zcdo?4x&fdB2r_USi|&c1r|=;y!v z$$$E9dw=*w>G~Q@;L&h73Q*QDYy3$#SXR@5Wi=X;y{z7|3!-q%VkOZ{ z-hnmP(PX=eC)@HXyC7FTXpueuUxns%q?$o>P-G{^2%-XTCHdP%ERnQ zQfftqsSBO;2Xonl5li`8Z0w&r+P>a$IW-Mn=~%FJppSM8sh#Eo?FBu_QU(qAI_PSy zpl#s8_+`}%t?O{=V7`!FZ8-pc6m}ZdtZl4!eacv6w#N)}dbVQX7s`JY`L%(OIkTl< znsQ2+6k-NF;k>;y=>}P9#1?|YH~1dT&eA>amqk&p8Gcv}EqqOMW`8d~bN^CSy2(X?S&+x7v6Ecr8NtURjh)8g1+N$SsrnU}v^ZNM z%|@*&rU&a8I_cHY?Xd*L-4j7$1vly2i_48uQD7J1&_txI3$-Ld4^HRj8Bc;$-$3k@ z8c;sqZ^FSzFWI-V@#4G9!!N$w`P~c#)+lUETRqw1 zP;f&OVD_|s;Ei~@vWq-=%hJf#F9}WMmikxYui?8a9KRzkg|R_jseH$qJ(U5OR}U;! 
zWw9;f?U3#mx6_<@uFusfpP87&+B0Ka7=JbRD|n&e5{cvz6K6Y++J{APAt~=ytq0w} zGO&{P>|nClasP`*3d=MfT#cx)Ck2-qIS1$)?YkOexueSqo!XVb<+21agIK67EUqK- zeepk!glgG)%s7_sI^i3tqzW0m+;$o_T2_nb=P#I+{0g#TMHrgFTr8a0Co`n{*pq}V zMv-s#PSa(FKxqt&{|<>8YFRkzBlqvr%3`m23-(XKfN-niCUD@UXAMyE zxoWDj%88cvbSp~hKy7?Dc%jL=XfH82(#Ad?2Kct08O$Xos<$sQze(^4DK7j4ZNX5k zT)<yeFR6tEGW!b>Ne%3w)|y^t<;7D_1l*j{zv_88WrmYe0njo}NA4IeoM}>M|a*h^u zHA8Nkm>)|s`1m#(Uz)w}k7?#?{bK;^UJODZav+$^MJaTcajJBz* z&5~kU3!~I$)kDVlET&_=qsXsPu#|{(wsE4mlxj|dRxFP?LIq6s^6%Yhv4j2B&kyd^ z&_6T&j@mokIk;426=GSFNqw5{Uh3X%(5s`NH9#j-*cjF>M%KUZ%D z-x-aF9+HUdr+vMJ{tIa01NSQc%9ZJnWbbKRhUUl&-Q`;sdWP{u3_EIBHb&%P?aGUP zo-ZD8TY|s3AeaRTHx>t8gTI2{1%!u7V zI7wOA_K8N7ET9vR*+nk&EzEJ=v)IcX!wZ*jh$XD?k5OtACWgh#Q@o(zO)y_1MQfUP zS(Bi)VK-FQ+hjh^$V)YwMqZ<^RyA6%I4c5cgbVBnf0NPVK%}O?nAjG#eFu4!tUK^m zuI2&00$jl{&S!8r5L}p^!yGkM_2#K9o@e~e;ID4&xUh1h75}Y-tE!#lYgXpf+10bd z<8L?49&d{YI%*m3qWWJnQ-`_3r*STAAdF*94O`?6{yQ?Aq$rTY+Je96*o*-rv>@2v zyShI-RC|4pmgNX_j8wfk%Ail(iTzjhR8U(_U*8WlT)qyls(pX>T#d8fzdXOx3$r;; z8rBAQ?dt6^4nfuTv@}Q7Hj>VhvzD<6m8e`vmI*Elm9tUJ%OUL_>UU4)-#S$(c$3_% zh+BcLG9(>WlG{P8{dk!PQdYsFThK7s4qY0RH3V(AMlErHKl|n25z*?w#0lxZltNzW z{^6xe?yf$)q*%kiq0G#)6AC*3QCV}vbTR-y;ga8BX2_(EVzEkTuOpcr6P}h;W*nT6 z*I;c$r0JifObP*mgXjV92p!9QIwT4wJ;1-RosEz@C!8xjTj^EQqjpq_^7PTM(jhLM zsuqV>uydgm1xD`)Vf`5E6xcc^4f{~~MCyW-lB7?>g_UM=@}1@!efjOb|Kj7n`Mbxz z{KFefDKb1U$96!lK2SX;Vx_InO|XT%$B(5IGj3}PuklB|!t$-lLMyrRmwI0=swcs5 ztt8k53AJiN78b0?;DBIcsytvkVR9Apgdp#Nwl!`mJ7Q!zlvGam%P(way9K&)oKnLhmFpb0A6d(TAR%j z(PkC^Y`10&@1E)~yMsD{T=RpRrsZt>hF}q4STGQ5mk3U2eMjxoI4pSeP}fuCz5Flw zYVH|ER7DDq@g8IIVAKA34@ZZ9q*CUBgZN3%2}mC{f8lRtf0w-%pIb4w@;}>>pYgZI zLh5LB?95dqN4^k|U%?ir3YXLtsBLQLaBUKpJ%K1dxxPJp{YorV;bZNvUhsMh*HjeV zx{%+>o|50#pu+^EFjmm~&BYfE`6mFlhi8B#oo{BpXafXKXS@upVLrIEMa((f1 z@A%uz%Z+_xN8;(0IYh-YCl6K4`ASxEt-vdY6eu5f7HVj(FFRB;``Fvk8~()TTRE(> zaPz@R?f&psEt7c(#h&)QR}&@^SdOo^`}c45UQO*~>e;&DqVxa>PJRqlcqFqf-tRFQ z^E*lL5w@ye&PuKFE?5^Sm2(8R z&o@tr3dsXzkK<*W03LB^Ms&>0umqMXRf3lrOT5 z-3chqHIpMd9sRcp6wD>XtGt(q`_wpzdlLlbX|>G@n&TIGrUWIkt&3qzR{Hd^&1>c? zsYXdbvw#m_*A5uOGvIm9GwAUcV{6x>GeMT*eSz{LmkEw0(@0E2PcEG)@LfGVmJ3=G zSonLXa#h<}E!{pqJO;BRP&RxpON%0@u{5DlP>f>Sck^iV<&&M?|LK#z|EC}QZ~y7! 
zXYj_|p_>u@PQ%>3bLlpgio7BBs|>K1QLghme6bIs7U_uQq8zTjVP#edwbF<5Zd||@ zZnJW~fwxQ&yQxu$XsA#IosQS>R%v06NsS8){z5b0dh0vaWfz{$*a~5CeMfkeqivj{ zh8@kB7_r$(z-h=nv0+x#68%A8ma5cW)m4d_Q6`xX_*k4|!cxW(BN5M&U2V7bvrh-o=j3p8;y! ze-<(VoqB#0Ec-|}pRQ<9nT%%e$tJ1!^9X#jKa={{GwLr=Hv_ro)@ z@OxXWn9dKUtLR$)m4Ds+$G4w+SwWDSSHK7)@mN zXLld&I9cCyuBErWop{7qIs?pS3HTe0Y%t&r7}NRQBYW2QVQD{TU%7Inh}tWyo9k_ zAtYm=nk!GD{&Aclt8uvOb5NnoDWv&`qJL3FL1tF+kw+J=E~!4g>kw>>+k=xV`;^ag zwS;RvD1LZdpyd)@$oJKzNSMyp`_7iI>YM!X7T~oG-zT>*$m9#!(&RSXpj9CU>GNH={U4zeq7-LgDz# z6$1NKDwN$8gIdq8YITJAYFrwfn5LPb| z*eU~Oqgu0$ImdR^n^JLhaJQZ#BI4K+kRn#dD*H7iv^57$3BY*!Kk%0Yfzt>TmX;Cf z!nC)~4ND4|lRo^;qg_@TUm0A5&sP$yC()_`stSBZm2z;j^-Ns?FluIhgaH zLjd;x{JD6t1DPtSR-NTK|a-BK(abZcA&2 zxYY`LT4%HlBkXM2&QfXig9Es+iFO4LxH&|6N0NVp&^bkXM9>j6D}~Vvb{g=zjN)ba zZ9Wv1ioX%~#zd^Sos{@jUCj0!3TNpTHtXy#)4sE?qVL(rU;%$yd$0G6{$=*od#}8j z_2@5Y`lw4RTa}y6s5NYL!mKX2gh6a|$wg)fMERC#77Y0}q8O)RCDc1iY8tIb@S1t4 z<;&8SrllR-zVG{qTk3u?(1!@^lZ#nP&0MF*etP}Oy@J~#m;cuf|J?k>dqwFhz|c^= zEif__q%SbNba580^1ZQZd;}w{MGU&BzR{m%zB_;D1W|3t(j|moYm?bn(6u(&Sc0IK zW718$Djx_gcS-8Xn%we*X%D~pRTU)G zmX@t9E+H3Nre`k}Ky=MicrX3l(*avJ0-jM)lCc+R#&Zb5nuE#7$xlUvts z8*iVuF>sULg7sEBXE|w+dpTF8$2+>wISgm%%Rha*az*0lqN=CMS3Xfv0SyefEq-f^ z(ORfcz4p`#oc7h85mJ}p!-UMtgKIK{IhJKsZ(6jlv7W){@hd}}1K(`>`nlClttnku zS6m6~R^-wxWwh;*%sfpYWCT(v_?wr@RswDTd9#+3W~VdFp)7Mr?t+JwKb-c#8-E<@ zZf-eqy!rI8PVo24Ie{;u5-~b-_?RsR&+K^jlU*NvO6SgZ6F2UTUcGzirX`;N-;vvQ zFWtL0dSCd<)`C;d^v0N&l3@<&k)9}aU5+UJwGgtd?&r z{)LDYS>Q!CQf{X1+yQHvM=aH>+Ag9Q#I}vi$Y_vq)HLB#@CD5WF47%rVj7$S&v7$o zj%&U^w)QVw1fU6%5xHtpZ?E#YA-gNF3y)7FQN`9Is$6kJh&tK!pC#uQWb?vGAPHnnVh_4N%; zJ@fTj>vyc*xcS9(=XV~$ujU9=ZWI5CxA3=PtJ-vgrxtOL|M+Ip_M&Aub%l(mGnVqIq8xAI%}=DKKbH1uz~#Hk_eiRQ)T@;? zMn-!_?%lX^>C&ZDt5y{*d5kXRwFMb#@&R#**Ey?li!r7t+19hu)RaxjI^T-R>1hu= zQ(pbo8+T>~853y@T)Noo@`y~|zpmZ~BUZ~8M;>FC*I-v~`=sHoCA7!nyDB?`g6mbU zxx^QYd@~Lbr@xbpZFi_e>j-Q1!b`ocMgtr?0)Y`46v6HMGz6krwL+ zqo`Z@$i%{CXUo?@vgmKOek8tLTehksGoK%;CRs{{EMG4olMH;MgBw%lxaLHwW?^G? 
zlZMWgZT9&KC5ghU^c9aX2c&A%`4i`6u1wQLO=*u+3yYA~*BOrv)HA5O95h1aU)&xW zZ8?7I^=F>KPkVZ0`P#Cgx?<*DS@mH}aq@}c;=;v?m#6*W`1UXEja+6Wr(@K+MJ&=$ zZji;+?ub^pFKzK;Q6)jmaD<;`J9`?wdHdC;o~X%NQ&qXDthB1Av?R}h+|nGP`<&2f zgyy6V1=bX#vTRg1mI1r5(N1NSk+=MzNAngfJG=kL<^JxrGbcLE9PPPqvgiC6tnZw) z>L67C((w(`5_@sq$@9C9;S$^)zN*Ua-I2Ryj=$n0<4#228D5D{ky)s6Y5vaT<%oZSaEVdS*fU};p4!ui_9qjP9XthbSGi~q5KJ6u8o+{F zq-G9!QW`wP^A`?^d?<8Q9BqVu(<`v6E<#~TO{>*c&)3umwwREYBc#OUc5ZlgDLHLB zlp>B8f4%q{vNu{BZ0oF;coASHTDQr}n5vukz`FjO?IU!KF&a2#g|{cP4Cw}88y(Z% z%PTd?Whu%NB4{hrBSOTY$kB4YS_!pS1?*Ow=y|Ys5mWM(Z5;dpmyA`qJ2G~Ecx>Os zEw8VBdh081?cKQTi$8tv@vDCV<7S7b0>kVwo+_QEP&$c!6?gMU6y8ek%E-xZegu~$ zR27{VJNBk=7TPs5gvdNu;LBG1p{y!yy|BpGF_Gu)A(=g)n=-e__Bw%9vA^+b^{KfZ zZ6~x#&oRjZs>Uf(e;Q8~$2%2;Q=iE`zZCa1U+=%}{_V`36S?zq$}&p9Qzpx379lg& zYGBdiEWv8?(lMs`Vu~oiTo_G|qcCI^FoZH}>sd#di0P|+>vzxpc=h_#9;81yFv?!K z!ejrFi$|SD+L~-*=g`0I|Fh*}b47LqwyLnaBDau^W13m$CneCJR=aoWzJJX8{cx@_z?aENfb#NRE}+(Y zZ}Qg6$aTJazB+kjI(}4&H4iE>C7<1=a;BDkqwtGIY$d^*DQq}9H=t??ZbnD547SjO zE&IY^^2u0K!Ld1m@;8LPyd%G|zBhaZ=&~%)hdf~@+T{EDpwb+uFNT}NtWAS-(*Q;vY;Z7Uz*Is5N$TIA%hG&;yPjYU`1ta zS!DvMRT90FTq8-#eta-{;1_$q7 zySD$U9V-ip)>c&4mQ*mpa#dj&Tdayp``1g;(-v-Bw+T@xE7kdGtch}Aw48T*a8m%p z$T(bS>zf@OhAjQ%){W0SwYsc0SyNTIy1YcgBQz2zF zBKJ5p2QMXIG&NNAs3p3!ig|t``S@-rr?fy*{?eZMj*^1^ig`)+Y8g1&Kmldyd5cYBysQ*^SnV+ zF;}n_e>p73GFu)!cM;()31{<;Qq1_$Jcrl*{npRKN)){osy-PHxvMHx)*Eicb8#eU2vSa}E#{`$S=U;VF{ z?`QjoE-jHRd}KDz%*aiAiFLnwjb)JBV@(NaT^TDG$yB(EF*oQx*OnxagBDwCb$&kf zcwPl%<)TLl3+EM#){p!!`d$1Ta!HVl0fGTO!@ka7lU?KHfKf6CD&;{##;zr0MZ7lg z{}t6LQvN@Dz4voe=hFQvXJeBDC`&WaC`TkAqBFM7vGK9N1}B2CNhaqYGAQRf${7@p zkjTN=KJPj2_ty9O%dNWq(EW7xvq!^uZ{4b{U7KMvanG}!)xCPP;JfR>Ztomv@3gL> zPy-Psyd#nLD*hUXyQBUWvjd2b%LgY&ksl6B_f-UseD!0^naW^U8=hC4RU$)`;%`Gw ze^cLJ^B{rgG#U)vlq8z0ifwzCrjfp?o}O!M)km*g+H>;I<{!4M{|qdCFfbXqX82@6 z*OlQuyTOMelmEK+)8=rRSU^7<>!NBppT%U}=P*;m@GU+yWZBKoT8#b{9-7Y#A#u_e$oVQ+)Pkj(m=dsr z950+Ssuz^x)?J$?@IS**3WD3s#RmkyF_^B5dtj<#Q9Y+tnfb+Vj=M5#8hDv znhkp`!B^bgm3C2ug>Qzshx$~xWHRUkIZBObcQH7H#5q2tQZ^f30f#JbwX*ok% zvH354cdX}b1#k`z;)n3ex~f>fk2i$8_CvOvGv|sahQ9*bmftgMmGmrz8?*)kMr}Nu zHX$nY(^bwIbp@0iJJy=4hil!FfSOigd7S{+!$CC0ZTmmp`1!gu2R3e`9DL}jZQp;g ziB!)_FN9a9qQakh4Giy|3rq%77Y#a3wU30PQ$Vdm2hNkOV#_;%1q%dwsd8f|yk9 z>0diHeH(~gCa%$$i8y3BV?1M|KqWwy#N8s`_7R)+(E1nx}x?g+ua9fw5uzkOGy2e1O1Dkf0?mR&mx3j#x1JYD2R=}*= z{g^Q}4Gc966FO*k#4-l*7TSfv^T_1J(VF&LZ>~Rez3kAbGvDoFFy)bNc5uy12M6uv zDHoo%uM=x8B=l{;v98{yGm~8n)ob%IrO!Ixrz@O+Q_}gB#x4%$NvuSpu^>Yg@3wO~ z(L;!^pD)5MJx9q2NsC`!_*}&0<40$P$GHY<(F?I%@YnV;-nA`=%q}G?dPz_HG(HHaF`*UM8v!5hD#apA4gi z0pDl4hp!$u@oquk+RW^C^7G%y%_Zkk!W@tQ#+am(BMIC#ux7RSzpnE)A&%rTqVBBt z1Us4wrRz>gWYUBTn;HUbu2{-*{J3YuWi=d3jf5Wlt4W$qj~;9%5=e zRJFb&IRRyJf8y=q5p9`gOFjckgyWl5uGNgLo3X1z7$Gr8VYhf_$kPG4k|hRwC5->- zBY5`#sh#rKGT4smdA5-1#>^~f$v;bY$lG1jLTj5OnYGT_TZ5phrl-1EqN-lrtnL$f zD?267iHTI(Da69S*F2wPwvou5Nl;s*RsAQWxJ+AjZNpGqqq#t1dM2Th5u7@!u^F!+ za~~xYtgKeD%2xgNdY801%)kY_D>YwDpDzJAOElV_w@ClED!z<0dueCUgE=#i=A2L_b?k3eKARI!C7Q|mc3 z=Vt-&5S!q{ls2~hhZ8qjL%<6I)6mi2#-n~x05xt1R)zwc5~vCWiOG&O3BVTsRkX5f zHoiu`jNNTIdu>DUo7+Bo|HQU$&-}1+&!*34auh<^Nlv)idPr>#Fo}IG0rGmmqXC7m z48dRH*X_*z?UBKq9Zb+uyf*!n&L)-=x_Tu^g0Y1ma6vvawiDsVJK5fW!3akUdCm6h z#=hNC%aX3<9__uiItP&x8zMjRnK&*imL^W^Olvc4&ycK;=au=5wCa}0W>l$jh0Vx# zGBP?H?EIiGH)-CBj9S2_!saY+Lg4V6pAxk^J%-2)#EA=Z3d}c+Vjp3OVspAce}(tL ziZuFtu+7j!q0Y%{WbOUs}DcP ziV?1ghci7;#i>5%YH@OM>}U8}+i3tIQ{AJ~j*oqRAZbxN9VpBn zBzQeKvo)$Hby-o`!ot)A1*wq)a7>gW`zUAU$0RR}c=^B=-~WjvusW%7xe5>~HrYOh zA_D*9;$+A5{}HC6Gydx~U?#nDjYU?7G2+HK>t`su?f^VG=4LS%>~aKRKz!aEA(xYe zoGTmUHw!U*I()n4_@(bQd|7|449-h7qqC;HyADqzahnutS#-`G{vvHMm4#iUpen(2 
zMY^o7qORrq)y6aDO26NA2}!Sd_fRTc2xftp6- zg-~D%(+1O{Z2{l{L!cq+KS@;Mp2iJDx~w6!TAe)apA~ozcu4W95tl+X=ufR2n54Kz(GYr=&C{(M_E+_H#-OJ z4~>sB*6;ZE?_1t~>)^MW8Ap3``_}Kj*j!b5ezJFn!YjE%D2YA3A>afaTw*yTPh@Bg z_)?_CbXr2YoV?y_)7j=npI6f9_E&Zc z;V&PH!wAJ$_bkYqbax~`tO%>6OtF1-U$krDW){y}iUU~xefq(nZCevwe~tA7+Kbf{ zL8*7Se<|Kp1k)Ds0x{+SZPSG)6HEA!GiA6QpJbYS5IY4#j+K<29plG&{_NgE|9c7o(qfND=P4&c$=7zD>CTz`^nPrMZdwE=ST$Y^wKK1B_&0l52CfM{qtcQ?b$%Ci( z{0m;Ex0Ed6!_k|3bh<6HO9j;(xSxNq#T-j(Y}J+0^jP&1?=Vr7Uu>Q z<)kdk4=gE2U22}0EBr|@FFg0zn)e~rRJy*LF`twYVQ*i>_-tm8Y< zouP7J-H8E?z(fdY!?(Mf^T-lmsA@RNQHw!y#c}TzsSdl)Umv{nPWL3eiif}4Te|l& zeV53d6@P=>b(E-jjr3EIjO0VocaB};OA3pg< z!Kb13KAvYyyQSuT=K$*m%sh!v!5ns`6?kM&#FhHN0MtJ^n%@| zXP=>LIN9GeJsv@r-L_?YaaQz;Z{@~Sp4>m%*E-n-;jJC2X^`2MIx#ks8x$U-f-BS5 z^f!}%>7BVb(@@?}NFQ+;txSb0vhwh(mR!0okp!~!_R>NH?54*qqpC|5-P)O9zEEIt z&isyKkOZTO6PHN~u!Kwswt6w9(5cahF|Wofd11-P-QUd&_HTJ9GR6dY9W)zmSHWtp{Mfa^(- zU0be6w)Ch-tyN>tR>fU5uTYNGam?THurA}RnP)q+t10-0r4b=YnX;WFsMEtX_foCT*sAw8jnnBW>Q5p19C>H2xB|MbqU zHosH6XVd#fcYJ$#_s-p4Y&p7PCs1{(tB2|&{2dlgfQ*Do1xxCit%Bxw6;%z>1=bNc zYu?+kkLkTTdE@K^LMyq|8KLS@ES8Oj z%n&4ag}ivHB-)mSzj7NP9kLMG+jz^qRveU4@v3AQml4BaXz^vKt!xg32KpuJE6z6DUzEY;ba+y_6=M9_b*f9 zT~#-mDkfU1XFB>G^p4QU_;C2nbl;I-wf=MF?&&>0WX3EjM)!iO zTJe_!4LZy)xP%3ZgXQ(4&AlCqxM5or^|w2}cGd(l;&QVS3z&gU;|ab@B85$lCmTs2 zI4`g)H+gYhO62mu;-WNZt&&KNPI@hZX*+iY`pAb!PFO!Br-;xNC#&E)0bba|E4Z+H z(OwlG^sT9oFVrTiIbPM<|IhEj)nT=Vh);vSZBi<{VRH;^e`2r4xlpQJdaC{At+pE% zcb_}6<3xRF4VhbbnG*vTt!h z-1FIKiSNIikd_;X5l3=fR$^98QUK=-!(W?@WO!{8k&Fmi{u#*5h>OZhie3Ejb4=T; zy;6E-sK2Ya0`l9h`DZ}T;jh{-n%9J@If~V!t9~+2gE0IxoCONOTyt7>`0M3_jau5> zfJN)%HT>O@|uI@^vP6x#k)tuS13LL^;;lK3~Mt(^wOK!Qg8A+aXA%!13{AD5%(Ln&36fgJ^ zL*J&?g=A%1=`1s*Y*=a8W}B+uovWToeLKzc1NSNeVvga+%Q<)*!e9PryxV1-WgLSS zULTYGxlVA<{I=DC-ET^9LcK-R+tzQ{mQ3`GSB9+59+;-1O({ z?JGx4q%Mgzg)lxFcyTOGjx6vm#-|quHn$DyF|oc*tQ80`SoSBPpP{>Ur9X2O?m)i0 z;)Lv#30bR?a+t#K<=gB3HGAjx$y@h^hWLn(Q#N0(x^VdXwokWiTK{Qlc@y3O2#rE3 zxi04I|D3*4e)4F3Vyw$->m(y&Fa%oGsFxy-?KptpGoL^2!mcY*1U($TH`+0@Hm@Wr zAvHUenXL%7;)^qUMVax-)3N7@$`34~&dt68YF`|PmIft%6e;Mm#S5b%Uf`F0cW8_~ z2^YL=QMiml`DaqlUglXQPVzkx=F?&*VAo`5x5kZzr&gwve+pT%KBKw4kp8&jcEYl$ zURhJ9uKkWsh7;7stwc_RR3EArbEKwomB_AYPnI`dYCw4f{$hgw_>w@goT}-#JRS8A zLNeN;q?_2f?49!}d%4zOJbo+7+b&$HIezlm-u=h6d<}}Vo;!EHk6KJKKM=xp;Pz_m zTce4XlN((l_s8#^I&?Zey&xw!Z+UtVK#anfEf8TlG@Bg6KFUc8CGneBUa~S4m7vj8DX}sWX|pJ4S(%jyJy$>uYFB)AyE^*HF)D5^ZM$7Uw^#iy@I!C zub%20!^VO27K$`d*Fu28Xk3Szm{i`1EP;w1f>kLFHbR%+CZe}tMJ1K}_~z`MFSchy zr>#t1QIzFp_-0mWWbD%CA|oT#t;;xf;%ep9#vNbm%k~v##%7|`&-JCD6|~B)5aD>_ z(4vmUd4mWwBI${%ASXvHnE&Dn>xzpy={0WewSFH3IHt+VYnG&INCs@0pD_N_RntY7 z_XQZLVN(o*|6-sa9vY@nR=9MCspgZ8&zd0FkW?Yq)YMW4<`0bsbJP%M)_!5@*cGv~ zf;&)a_cz{+ggd0-HnkOLXM!Pv1P{`zphh>e34pzsQZV>tHcQe*L3XT z2I?4J(r4%T&t6)b@U+2TcAybapMt*9WaHp~uM9?1_Yc#3h3C2*U-p`UmG%m^HT*Rl zi2Y`*3xjK3e*&73*7Y2krVGC1qhx9-_5zsvc-$?%22KTRPlr^0hsSTT69cZR=s^Xb;ZOF>8hb2PMBvko^|jvSz~VfB(Y`n4+6y zEbTReq1@m2vXOU}_tt8%(kPeKCbuc+P3|m$G{Il8&7rwb+*6js@S?n8F%WFP6hdUn zY=Fo@lhsq&5FGgaWqJxV0)F+`(Mih_Q_Zu{d=hZNCu|~;9ic50!pwOR&nvVpC4pr` zaoWO?jK!p#ee;*azOo>G-lFJ;SKmm_uQ_(M?7;DF*1xx*WX0b~-o$t+J3cpQp+EYS z=yUte{ny<;xWMe{;!@#jDT5J6C4syZDY+|>mlMmAmy@{L{*P~d znSL_a$=G@*zl81Rpw+j6H$PeXVRlR^`AjO*>=uxRA=oBt7O)ONk_@5rhibN*2hWW1 zFNl~&XEC-;(hKG|uhe{*a*ISN)S)J@YuA_-O1!LvT+ddWbVtoRTc@zRLF8MiH})V^ z6j`$I5GP47*)RuVPt!XHGqqdzb!+rVyN%9Y@c3^p7%|g%^W?Szr*wj;K1NcHl3sd!1{#i!AMp0Sj6>_uO!|LpGYZ8{DV6Gg3R1##CoWey~+KUzAh zCz{6|^xd23=|bA^=5n$hu?t_CzklbxyTh}xHmdF@G^LKLBLym;yPA*}do2n#GK{nU8};i@XdXyduAI&5`Y zMN+H(8mb!4g(FMt*l<~Ue|S9#(9-v0t{T$DD^mqM+l{$@%A7%>yRs2qO~ON7lHQU( 
z2Yhk7WXoaT4(1x+74X#_VLKYbwS7o<_1Uz?-m$}^P~5?A*<-1aLpCoCqr=wOZM-%7 zo>uJzPere&QO68x2_6Tex0@7`~JZUwO2xE*ar>K&EBXrH6)in(34 zsZ!xGvmAPhLN`?;k6Rw*T#eEBp6d`0?~Y^vrg&TmI+KF=F+_8g6eTAkmM<$NreT|6$3Rz~We{x~_(+ zvj{`87&ws<9v;!DI6TMG5CjBg2Qt!+a+6V4v|R>mBnt$ixBF zD!=BQ%Eb`cnCYcHM_|MLIcF~!!eT%~Qo$__Ve#nD5ELfV=SuK8bO%cvhmOW)k-tuN z-Mn`A{L!sH)|@Q^e5w3WO*74NUt_m;ZZ!7}w)73QpsW`B6~_{#v;I*da(yFRsXU^{2dtmeDhrZm5b`EIsHga?IU;FKovLNUYre^fp)P014TR!+A zD>h?gW=TPEKHDw1qWpd!*d{Vq&ygMJ5ti_!f?U~&X#jU#LVm()iP6u;o!xc*&*@p3 z8icU0QkjNab2uhrGd~kNNhaHG-Wz=|*gX9GS9{kLzLOhRQINjMw2}vt zUqu_uutU{MX%CK6cT6<)-tV0xxqWKefvZ2BVG=00IQ|#?4COiS7sE-qj~PDrWb8)i zvD0~}Sy8e13l^sr=B+-k_c*d8(7CU?aj>cdPmIR%Rd40LnH`r^l2Mcymj;EWM$G&O zB80yJ!K%e#*pr+vAUHvKm*B;z3Gq;1cyPs~i!(Py!Cxuh);HlOMc8}+0TF3+Q7%@~ zGf-oTd=E%r^!5}X*4I%HS%H3;5a(QMO!jpy2Zz4tbSd7(NR5^#rJ{1J^JvOs$H{_|fplSaajn57vMC;k)N{e|PQZ(F^`{%;%K`$x=ci^-uOIcBzzLnwcu4P{`K4b6{` zN3d*8i{(!t&ATo;os#-YPsg9PCosHvbYtYsVE0r{`+q+fpXjb$ot28sAOnnm_2RU2 ziBu6sMZLV>-J&&5#&6#lya^@Omn7a(?BAHM_UG-tHovnaYgu+-V$tgK%o2^%J#AjKZIyXHbF9QgUUyO$_)3KrsgXZ)hEwf-hb%Sww?PoZD~4p86Q{Ah(k>940G+3P+UeyVwEg@ zFaJIJwDQ=6)hT(53d&CqkvwBB*yNzXvgwxIj!uSsl%!)(l~9P&i!U0>vb=;S+L1Ei zVq#vO2j!gX>iKDMifMXMpVhU9tC~GI3kAV`fM3V#;MAkBdsnX1eDJ|%IT=MnLH3Hg z%%W_6?)>K$LS1ilj&V;wbEvj@q`vlMOZ|hP-W%;L`@Y%?=+oVX6+gbC#;{nip-ASy zFk07ezqkL7iRnX|zs_2evg3oV5C^tN*=S68Pz%C2z>bxS&GJG_SY|luCk%Zb_Nmlw;F|5oBgff zuQ6_pAU?>>X2WpPI*c3xHAk#!anUp(cQ^|c!mEW^j{I#1f02~)0h&tLGQA!qI%%{e zpE=s6YTrIgAG9@fnYmBfE#9kAOs6SHWsSXckYx$?YK=AP@fj3uXvK8Mlm;R;Xlw+6 z=inGz);fqfX>_n|d-$ulXzlW|mE7`X##8M03?dtn>O?<}f9VQO<7aET)o@u_fnxzU zHO;eg`=`M)M?*c3Lcl`R&gRTuNMV2Y|Bt_Jg;e~tmD8!DIyz?h`hFcByR>ib#y8d+ z`g%*%$>U{551;>W?~!fa9{u(^#19}WKUHJC4u6}wkd1-Aw^~{6wADAP{lqvgM+xH%0W3!Ni}7N+4<*I1?!n*h|GfL_ol9R^l;uxjmJJ<7wr1M2 z1)4g+UzQ6QJ1B$f>E#qTXqrEJ>B89MxogHp?>wBmKiY|nr@Rr%hfnncf114c?S}U= zVmEfOu9$YtXP5`89IVRa^_E6`a>kMK8U**5y_-oA9P?(*} zKoeaXEgjdZ>Q0|2KXSD6`|rR1=%ee04$pSWR*+wZ0-wWQFViJH(!drA9ZSvKzTw}d zr;mU4T}dDZlT!REq!CPY0*DGM7@kY^6`#_T8F-e)6(&b5_b)3*iV_q~ra%vPEm`t% z#QN1m)5E>vJwts>Zt{_o=IK+=O8TA7iN^!8qs`SPcYgih+O;KFSw&g-D{__>X6C_I z*{AUsuSYz;f9uW%H>Y`PY-XnWCLT@%%g*fn$6I-C6-YkE7dijEl`BU(JJGz64h3T1 z+AwYlxcu+4zg|6d`G-w=1{+8C@NqSfglZM{6c?$1+Un`9;De$5U7vkovlsJHMD|Rl zv+!TsITU|Ia3zFSZUKL(fCIkf5uKGpH&YDV^7CGZ`0~?tZuB({cXW1%C9>pr9sk9k z2?$o82>}Nu?6_>xB`39QlpaihY`AF%YnW)jqy=ID>re8c z+fXxST5l-G8d7ug1Xs2E7`y9ba3Nt>?=sI?`8AB@zdV_AJ%8(M4A>z^tr?8dJ|K~E zd&q0O!1_5mWtuvEgk`7YE07)~EtD(gwRdWWNw2P^e`V+uG*k1yGMUJ$|1uoV<*Tuk zCY|nf7cSo(Ygtvoze~#a1 zD?Rhs%KTj)zgK?b5Yq`S?>|twd(Y9Yw^bcG#iL21mY3`1DGV`UOgn3I@{~rvGah=lxPE8Fl7+)C<6@Sd$J@LbL8PQA4 z!C+M&X=O^{%0PTcN?iKVsLZICTb+}SZ#+^M?QlCZb_gwf|Gxio_32BAugzx?B17v6 z6MVFYQa3D0^GChiQ^V3t~7J|9h|I#~5 zsS#gdO4X1ro1*A?R3Tp}Gz-ftr)96ilw_`cFm|7?J*Qot>^upN3)_}SxG=$uwhOZB z*<~LQgp< z=xbHS&RyN}BP!@)Uw%1IUd}j%@x~78dXOZo6ZJVk2#i=-8npxQx5-;v0~^+?Nn5fc zFEL?ddYaWNmZyNNOHsb%B`(g6o?nmHlo7v>$V^;l#ouh-V)95tULv!DW2|2ogRIzBUrAZItnysxgRybA8zvQWE9rhgjeyv)i?B>DM+H7iFvJ&{Fub(h5RCW zR);H1{EDfC0&zREH@wOm;`o5C<)|G>sxn;Pg6vf!`RqP7al~?X=d}g{j$G#)4Rfv5 zYKn8Tw%UlOFQgen6AYhQHo4 zF36GPmptXa)OHP(fG76AUT7uAdH{7&8>Xp*5I>LfjWty5{P4}~>(^e~ zyS?V*f$K*OT{*b_{O&y`wr#IFb@8SyAiM=*h%D(W>|Tia+VGA<^sRPGIkf3Xy*PMq zP|QuC1s%Le5f{{wuVeGn}D`v`7y zAFq5evO#j%UH!!Pc=h8V|1cM_n`&mPPscxJX1mJDb7B(HBBSZvWwrun#p1~-L6`)q zBsqZrg&zLKGliizDd2x?QS^)RTdq|7I(>&cA&(Y{@#f%PQ+Lab99`jK%4vsvs&+&KJ3Bn~ceDBCk1YxTDm9 zH4DL)1w`^sqp=3Np>s}@jm9nq;^N+?J67^U^Px{gAp3X`qv?I~+-yHHbi z@>==9Q^&sAd2r(w4JS_D>mHea|2As=S%3AGO9>rCxx{>##x{&)ew~QXBraSm@ zyOpPNIymrj{4U?cj$H>cmx!lw4rR}VDQ%=TTqKUBW&vv>biAlI18KBl!QAv!%_F>OaU zWr_T1%*S2wL 
zquUT?=(Bwb;rMG#l2!`_d@b$lm4KP)jIFl|D#Cy}|HTto)|zLLC-PG9yg`cZeT{84 z*C~X>%5w!_z1p;#+h=9ISOeq-yfCK@SM9ZlNEB@w8GVlVmC5{`IU3^Fx^2HHZ#A&B zLjKnFz3vxhJ@{dr_>f{t9doqR-ZIse$Frl=%M3m^%0sEr*jv_E>!jrejPM>xaF(1i z#MA!PTjfFtTgb0AV20r@TpMYzr$&}GC5zjgU5|%HPH)@%%{!}4e!Hpu z9Upg9TJ{gve2IaBWvU&I4vOWq%R(!A3e}f+x^VW5rrH~w%p^DoW96ZoJSW~#uW;|B z`uRHeJmm`#n=&a12u*Gk|3j-9|2{i|5Sq$w%Cg19>0&`i!H)GoN43i2IBDde_)Ah( z`L7NLPAc(bW-N|d6!G%8-ADg<=Lzr5Wk}qt;qKoi?(*{~M)MpKBNG61Mv`?o6(!}x zy|nDqwjbdpx0;&~s6QSYel);XwXt7r-v0OFdkvRMKY63@?rK|u`7xyIlh$tynFxa5C5JW6U7t;z!$*9=Sr-ul35<1_I?6V zY#}cxC7H;K^Toclh(hY4@jLuLv7Ks7?%qG~{jPj}8c~#*k)1&O zHvw=YLyPB?g_OJz;sG(Y;yDquYW~Re*7*^J=PGs*E8{Sc($ps zm*5TQK8{OqqAfwPjv0bW%=|3JFzBkZ!@SAVmEnwnsstk3)DJv<~dVVu}i;484 zMG}ciGm@4e&{7|4e{K?)nK=6F;QRcnNE0oHh}gLPT~v`&2>~x}ZGVRUBJ9C7ld%!T z906R3x@HSRO~!nYvZH z+g*jLq#P|^?%h>Y*m+6FIH!8O^E1zD&u)2U+ZAw>O@jJe8ytJjPCgv`6>^O-P2ELk zcY4N&%i*>1-w^&9@`jGKZ!y{nH0$l6YO~zCRhIDw6MdJxaZhOmCmxnuai% z_04j#hK3vgL<{d0Or^T%36jwIEQkg*hPVE7<95U8(k*YkyL-cXHOKbVoH|@};$Y>m z<7J1BoZETm(w^gVD_}M0aePa*vl(zi*jTDVSe?`W;NnFoc9b6fw{lAQ zLa4Fly37gxd^mQS+~&FhT(6>wGXpw;AptRoSokK#=Ov4UFQqy)rA(j^i2^QV+>+$w zg^9UKA{KnN;oJZ7@SoEiqg((I#)6wVpN!o4*N<{9EpSQ&AVFO^`ZdSt{) z7xrEJdHfgG5nv9?HU~lR)GFq-W3g%oUn`ffU737<1OQF?I{cFlB_-U&Yy};?)VZ5= z2k^VlAD5(a`DWz@s90v1aI53<-luU9ID2L9#;==7uXkRn4PI}MH5U7Ne2}yi!Yajv zysr9eIcIOOnDkF+=9&NXsBs93hxoCh2!%0qU9E3DUs-qj>eaoc4{zRa^vj*}GtLBu zOoe5?4)9uEw(QSG8U}QRnx>;^MQ`hRGI8_X$l$IoHbU{(-bgORpCkc4WzUnw1~1X- z8mJ2h{>#P@N}F(^*oiZJ1FNXG1yN@XocMYAKBf8D{=w6`_N*^nPWHGkEdcDU%19@_ zO)8_%&&3&;?^~3ejKA69Kypm_&jX&Fq^VlXi% z$&0~TGI7DHkM%i_4HP4N?M@c3|AG z5_~AN@{-bW6H`sxsPeO@qU2cr!k3pm|LXPQXYUVB3XRoSgto#P3s%b}TpeU`BEUpg zA~$rDjBd3!JsR>_4-qI6!e4IZ+@YH?J}brA(IzZ}D!g)IowO47)shbVdu`$? z5|T`FVz(O$w$H~{DtQUQx`z40DAcl-4ntw7`MS)o@?Rhr=(iMsd7i*+iSW+{N~;eG z@fZJRfn!hfHFv^DCNYba;sxzR(qY7j*ifr0mr`%=S0Pwh+RcB`j@VJc&9R%t8p|qB zR&7D*AmSF=u?unTwccK9lyHg>m#P*22dgbw#u&hoAF1^w$arxb8qUa3&AE9@Rv4wL zPP)z-NsWf*du_=we&m0JsW=xq#$gAZwPjvRniK{7V?p@syoC*lZ zmdBb6sV(Q}`dqxN4y(F{ybc+eLu4q)cSHyg#-6&X|MBF^*Pncn6dCDN&{z`VS)D*z zY50Y_1mY!&m`r|8<`=98WR>`fnO_hU5&v=FXaD>D|8=)}mM>i%R{e;$5=asMHv8oL zlDFnZ%*Pffafv@>L45rD7$Pqr=F>OV?Ao~Qvo|s}l&sve?u~U>S%4;eyXnzMDUnIh zFE2Uu{jrB5cUk8N9$Ax+`YtrnYc);l3F3;9uc@UAjQ+2?PcI)lnYk=6FD|7dHIoqA zu4Edfm=ht>5*d}q@(wSv`Nc&^IfaSYg>l(SB3}P~!DddPFJ7Y)^4V5Y`r4Gk?F z4f?aA3&&r(g`?zfT>rSZGkMpoR(svPNH?O^ysW=31kXZvT}QB!W9g4klvltUpTNVx zTV2;1_J6VC!k%Nn>rF&wg&69Z=2;qDn`j%9<|b|LMdv(97}{Fq#C=xn%#Ro#)2>a; zb{|ZkWiEqoZ7dnlrBpR_T&`?9eWm>1=`-8+9sGP-&G8GjI|lhhvtcl{Zbt!M(O;1t zV}y|OBH8X8*X5sXO}1XYhT<+OhRg}}-B|^RX)@p=C2d7|24QIy)A(V^p~)8EzsQrg z#;{xAJBpqcQ>2T?qtP;k16{cr1&N4kA8tW}k7zk#P5nie9t;iT? 
zzV_L>MLE$aYtsu+>EmC@*h(}5^bPrz&3o~=h_AQ2GkK%Gr=zi_x#8~kjoZT=$98}H z$s23dW;0YV0lT)MjA$`f^DoHt&Cg9*Osj8hVib{+uuLM6TNdXgMe-uegal=R%*0Ic z(-vKrkeC+}yD%bRO={Zxfx)R@Cj$$BA&9mXg569Xlk&otjLF*3STIN@p*R@AUqN22 z)w=(+`xquF=Y>uh!3-S@-Apd!5LunC;C%z~hl0Ntc3FVGmS;9kX?!*a5REK~G#@Dk z523%E7$3)u!zd@fXAIQ6POAQ(^vJxRs0A3&;2uq%+fbahY~#??&=6lRBG{MW;DttLAVm%ACb8^bl( z9yw>Dy~2F896VCnCBd%AbAArtFSmG(lzk|x^(QQy2!yFVJT7dUSNFQg2&~3rSu%lI z30WI0&*|3uM)l=T{Iz~Pe!TV*mZPr!#)4Tqc_KqP%O%>;@R!?J$E@SVP{x=b?2i-U zgF-Nc2y+y}3Ig(|iCj822kd{TOx^A4|K;Xz=?^=;dGC!2yLZ%`IbL(>DELcM9zS#S z;IXSeo@hN^&TgNU#t{C>D(b3xu1aep*Lbc`U43eK4Fq>Kj%>AELuvsL*F-F{3mKn; z7bStb#7-O$mgP}q(-^#t2f^8HZ%BkRWSMJ62z+^`Zi8WUUXNJuq4DW?;Oq7heCF~^ zxuLE-WZLZ(a-M(AK0UO3cVKC}d0W}e9Ad(WWe&CNSJo@S+^C9t1Gs22U+!3yv+A?;pMAG!$FU!dHk>P^);v&u{>fN-OWE1t zG%O|KGvhKCnt*9iaVlf%idLi+$Gj4K=<9tr0rGhY8B~uE^5;~mL)}=ik#kZg%?zu8 z`uE!p+pg6VB&6oV_*bRoti%O6C6x{)MA5R~(+k6BJe^L9zX!1*F|Q;kpNM@f^3&Dp zpN`HTe3fU^Zc~9Iv09q*WM*HA;#x)5LU@hyT3%X?x=-`=kR~1oE6qFQm1LnE9acSI z=(*(SR0|z)ui~FveltBcx&NhoC(i9Y-gKe5t*oiNycszV#G$9Yv$vs(A%g?W;O~I- z`I2O|FtwFcekooM)IOyAq0sDM^c?=O$$@0iT+?-}rnR)J=GghGdk!4`a%<`Ldv3J` z=>;GwWxHsOq2TAtl44k_NjA}9F-Yj&_kO*w@5J(?Y9c{^Yuki>{Q@}?n z+wV*E`Ijt?EY8WkR(cMH8G^LoO!ojP=mT4Spevm5#4ED1vV2MSNoFMbViV(DdHvPI zelT_tVi0vwrsqSotI?vD=L?CGn#aOv|+TZ#%x z$Yv8v`jB2K^xHm4+Y56O7vv@^&WT@y$%X|bZ{S$WH05UEHG!VnJJ;X>|FF~(&D9Gq4c@)i+@!w}4Pjk+O)RqnQ za>9;2hoISbv{gds;vr0xzyHC4`GI&`SXeEB z4GD&%2>+G+wz1&Qp4|Q^n{#v6XvE~o3+HO6c{8mIxi#PF!VZ526?}Dj?2y{Xk z@CpRCpeY@&-VNh%#vQyWo7b=t!X|UyReltX!gg-EeG?(uiVgnW?&|(!>}JK$Bb(oR z_r%ukYR;C{m0n;FL;0Drm8VO~PMm={mF_*#T~;rddBw8uXLrw{%IZ)bC@0*9|6)G? z0Aa1e(}BNoVjSGkbu;zPBv6z2h9!T z1?X+3p}x~Nh6(X>5;*y6gYoQq z_~@c<_Y5+C=F!;ApC`v3-yFR+G%(xWi_^vTS zpO4)5=06^e+`HX9!Np*CTCOhHn5p}U3U{SdRySs%aD$&BGJJaLJ`FXHmy8%+VM>!jl55 zVc`_f9`s9_a42k_CW#%?0_s|36$CyF-9ygZ(|bU9G~369A8mgt;M*z|6SF;27xtb# z`t8An^EEA(8#*emn#B01v!~vhFzDHO2>ETjaii^~#d!Oe2Gg#E3kw~lOdxRs@Wl?A zuv=#r4*#EXCYwg3)S>n6Tw&o{gV{#%UDk*71VIZBNaB+%^(6TGaWmi(f9ff*_w9qC;W&4+< z`(jfQqtg-s>4~WTVtz_K-z@#(KvigtaAul6D?K?mD={)|!Lp=O_{dNb zN?qhSI83%<*wWk7`QEyB6PCp<&&|)xQ%{UY3bS7fIj}26gy}KC`^oB zo**S%(mVNaF?q4kIdMw~=YN$Pnc-WM!9*nAQfU+R$2m>DkC7!@iYY*Fdc6O&h!=Yr058Gcs6b3z9TeAeJn2xaJv?t2tG+ zA*}Ll9#j3VEH5pO9NHD|@_1N@nreYy)ps!;L_*TqUf6ro*$sxX?x?<<#$JtC8_L`N zg1ly?&57+>tR5W#U-`hTn@Qih?MBRn!)^~L;&?!F@NkaPVqV&9%4)-3&lOXsx8=R9 z+ACs36=KQwk0$;3Tb)mChRF{QQy8P}C{|MCT9psnzOgQC+ z<`1n(s7G>!8mi9Ktf}RA3Ls=!CpzC^1PO5RMe9^ zu!BD@pDZs=X|G`}uq!8&o3%g;78_S3u`Ib|Ua;V@MzCwLvzt6KG2S&y{#nX%;&3Q$ z!701!UFh_s5cTBx7VQOJx9}pSn48vTK7DwaxaPQ`AUKl82dTcMhp@KTi%38&i zubYZ9HICZ@BUh%wN@czt!g3{ep}R*uTlsj^)r2$!p<|ufT!qE1b0yC1 zb8x#OK{4AYdHHRrAe#z{^acBkdp%>=Lhkr*W7W~p_G>jvxy9-V=4%cB0}X?Njg)SO z2`2n9VQ{o{jJVN8yWRNB_A!YL^2{T4bxm~Kfc)AmH#>$UbTSCrLN53bz1y_q@^#%$G^CCZR)Xu$NM__r}`(v0SsfJ`Yz)pVA+F_nWk&?>+)8c_tet) zFC|4U`eMVU!N!)U!Ku;C(OcNY)bxI|=I=!*h09aXbP=N5&hb2D@D18F%-mr%`iZd?qJ7avP-k`o(^`4!<*SpLW~-{RDyrD^iMVt`;E zn^>NhLgd9K(v%moAR<3C;^?-6Y`p*is*_$_QidYwf*&;gYaS6mGxxD%tmTc3Gr}T? z@a(ZeXM;JNdT)7Ghpp<=VVyXZzXf+lO`AUk8ZC`>b8HrP)m9@ZK!fZW4)3%;Y>d_6 zyJD`F+%{KCX*+V6WaH-UL1fQhbBC>tZ07@27@pMiRn#yLv!|>o6usrO5)}>oRw$Pi zArU8s2gs`|TiS{ZXH~-8EKOd*?j?A?n&B6%6g*b}SN{vicS!ZE3>qU!al5i`(sWf= zSq*C}Gh@**I zl{RU4R2y#{3cDPy-pA^#Q|%)tshrYS70Zk|qAZJ<-h-imJsUS|{qS$s4jpa1bhYmM z)tYmcYtCM&I(@13#Kr1k=g;p#6n%Zj684?zwp&4dHUei7);dNfq=3_b2#c0}o zH^aB3S5kKR?B$DdBzLI|1~$pBwGdwmqtfL&F# zNG~upRs@Y7M%a#$nWF#X^tkWE?_xWwjWd_+%=%Wc&m^O zws%bqjQn)({?4zzNsf-pO-Uq!ry=5GmcHoM<|ofjxi@hCUjGd89{8`(UXI3vJuRQ? 
z(7;E*EQZ222l1tRI6nQurY+gAiABD2p|>fJ>i{K4CNfyY&$8$`nwHWweJj)u@r%HG zcwyU>#@}u|=E2?d?4^`JTW|j}=R9fO1xGG>yM|3$?vPjPF)C)9ky`H_0`8xM+#qhYj7eyR z5yV+8A(a_OYK0SXiBZ)7!UMZe?|^}Ms9jyk%@A1DE5^UJU8-(AeXiob;fvpXdt&3p zb{bT>q`uEABxCgU3GgRMfi)woMw*sI$Hw+n&)EHm+b4fKOcg%Y@5gk7T4!#2@^YU@ zqs5&g)z4%rT>Ub0cgMyr5PxKp?M9GqxL*TgaTU@ z+B%N4!pr;RMyupnUOGIF?e4ExYcZ%~vY6`yGbvXaCL6|WEzfM{)_;Twny`wnap94= zrqTLl3{DB#;lqEOJ#~}(-a{d>6Qjdkp@&^ARi1DRsK;rWRpBfYQctAeNPB4w>vwz_2|JbKX~`_?!9f-%j+(d)n2$>f4;o--1Yi1S87jP0)nsXJ6?0*3|x{{7a5SG zIbKP*{(vdjX3PG$9H3?hB}vHQ ziQ%v=n~oGKHCEA)=Hb=x$tE%gNtY>6a3!&!Anb$;>Rh-+W@$uHm=3y$kUGr}Abxsd z^1mPaykYg**)ehX38EY(|BNzI#F^?`M7M~9u7QZMkYk?e#HCY zGcj(?CnhR%J~1!j&6msn8g1l85(V@Ml^s8s8o3C*Q0z;fQFm1!d1Z>9gs^}9(q$0~ z`znIJPdvTZ2;_;EvT1%C1Y0dzyVZ<2a)bhtWcX?7-jSVqSl)AElZyRmE0a@7{K+N$ z0AaO43c+Bm&A(G=R!U)V9*(1ZPM_F*tmSf}Oc*S0 zGr=WZ73ZFAD z+DBrIWw7H)b?dpaHAjzJ*!j)T&HpIQz8{5q!z{pmdE=EE=2(d{%C?V?g?BbMt$G*EgG%(if`Z|^Nc0(?VVhGDdA!%HXlO!-#PZ;(= zBtupehN=z$HD^oxw7b&Esmur#m6!2lW3oyZT_=vqzPj$DIks!~Yv#|LH9T))h*3hr z?UmS(=Oscfh8Kp23rN-UxH}qO_Nu+&mMP$l&sY`rY=-|5{Z-X{l~p|zmEGkP9c2=o zzkCc0_pv zbv17g`H=aOXI#h>c9qq7d1g^N6RMHJ`I#M^%)7*M!y17_k=1}Snz=~wzpRG1` zZbFtib?}Q2{L(Hbjyx>a0IC2cI$b$<{;T&lSD&tExz5%t;U!%u*d&@Vb{vn0Q3UguBak4TcgSGeB4Ft93pEv6hrDa)y1 zNK%5K9z>E{V0=50j7`WO3KG#v%UoRviUou!meT`%A|)kh*|NovufM+GgAdxvY98I3 z;>RMu!Fb#TH%G?Y3>*Y25#|d;T*qRI4H*#<83X2bu|Mu^-SF3C6Q3I$qTOX+t3H| zwet|C;{;sby$j)0OJ`%g2BsYO9_5{CK3RyTa8EPMsO+t&>Zz4*)m!ZsMjyqa@yXcV z3Q!t`*6O%G?JP!ur7A4(xiZoX1QYfwc7OL*ihv?=7HPBxse+6SlzIFR{z_X@HN|U5 zDGTkbs@JHdKha*Z>AuK*bJAN`LpwGXWY^X5u8UXN&t2#!z1V%}D*x=cUdf+Rb#>sY z^C0XKv(1dnS>PxqwR1Bj3}HFirz-yp$7sc052ICHRo|qer6t?!)qEZP8ptZ@_39W1 zj&>rm#w%RXPsrC$qaLgEp*{)nw^KTh@4mz&)wqXqV+c@@{Irax7?!RMX3L( z>_?9BCQn{Gd>4- zk6Pe{`-{HX^gJ?%5X_azufsBX4J|Tt(P6jc;aigZOyx zu}^j0_&<++zq0>?|FzdyK)_!uvBfU;#X>X)U{@k3oq$xXNKU5~OcbZ2#=ZJlL0sZg zu=~mAgd`^|OQ32oa;;n{uwBC7(q&(%khZq}GIbk~%&PS4KqUQ-($hr82T_uiniU%} zKO*AP4?F*P2MP2uo@{pIgvh>JQDI^OHi5s6JqTXAzB-Va8?y{fEiJ$)LWBvPMQV=3Z9rh1O2=Uuwc>i5$~=j`s=~t+k+#9gRZ{p`niOLgTDf!9{wt}s`^?vuVF6V zg0VtASZJVx%^qsK0X|xUvcv-Dn!n{JAwwV7=Jk+B>A7CNf790TBWK$#Rg!te=abAc z6Bb;;ipC$wCW&T}bfsBpKzj}4xv0Qa%d3PDVN_TW4wQp8#=Nk)Fn`7CG5n<{hQDR| zm8*@V=PM2$#t!=6=FhM0-^VnuTdl|pf@4h`05M^|3y;XlyRdyV-vwU{FOoS3KaV}V zcIZlpzhs4P#hTQT5`UpK4UveBl`<~MB?c5GBoo-LQ30XaD>^G5bAch4YH6h?oSht# zmarr~HX=WF@zFg$3K3JHc9?}NcXnN9WNP*|}z90$QQ#ctuwq;m<(M%WD4 ztD+bRMFns>D{DRdGrdCkCj!v*8p3vsn>$rl72U^9`E0Mp_B_x@xm(z@MDy<8NEjct z^X}?$0tglqc3NvGIy*HrEl%Z4J>;SU)N{YtPH6f5?m9yF|C$_$Mbz6~(7)si@5zdjhXDCPQxz&XL*PWOy$bPw& zAhiv%f-hPot+Zl^9PDuX*J196QieIlKLcXsz>1vMGSA9VZBHHoXzu2iZ)=|bUq{=W zzMcmIeTTo=vi)x#Hl906DS`EkR7U&d%C^hbh?a|2n$BOUJ$td@#Mw&+PJq#O`zLrD z_s9;2yNndWF#%J82e+7NQ%WwZyazrG@PWU^f(1t94Z1qAy#+a1h1%|~Ia{kchdP}- za2Iix?lIL4fo&By zQR?lC(Ewt?>5H;c~zs@|K9h!vC3XRphja}#VypI1GlXc2x$u@_OSFRJSIPkX~B)Q*< zUE$%609MLBb>@+PFQFA@+etDa^ZdR;rY1#nie8jeiynV3-)`2FWWP-W4#jp2S;C;< zx#$NvZdz!g#Vxx9=FM zub*jeXHK6$u+|jpcfrL*xXa07#9G4=o9nwHQ%@%EZQt}odRzh?FA@RzhLVi`qAPI4 zP&5Wf65ujnvka_T*7Ch6@xG+!_*F$~8>*XU$7TmQ2CyFy`>%#>wk1@qC$2+rNGE&L+gzD>Cs6m!!9sZBEEc zh{H?+0Tr-|TPBXyxe1GGp0DMoA;oB}vg4w$cgAgl$c*Zjf`EW8Szib#DO|c} z0VyEQ?rA*E!#iWa&KWYKJ+^AByMMKmUjZ+|a>YQi5zo%bdJliK|41azb>Np$S3yRy zG%|M3<{xoVg@?1&i=}}GZw(Agq`8TpG)#fm9vOx!&K~N&A)twOM^!!l?5eE9Qwu{& zf;Ytmz+R7WWS66^Z0#*;YP)#7>D=XpGbid#A8#%_-EsMR*Y&c&n%W`Sc&nOhM`2iL z-+L&B5Z)3c++VBY?h~%n48Mw{DgF{}7Nkyuw24%P$nM+GNzGhr7S`b_Uu8qL1~wg? 
zbPCI)k4EZcBEHfSgjgb9ks%}I9&8zAQ%{8*1&%|ZydxZc?L36thTw8kwWmjZHEpH6 zkdm@yldRP}hri|%r6_E?&(PD@q8$xojDy_h7{FW4Ry@nWy4oB|kk?CVEYkjWvNebk zJL7;a?b)|+&70+ikJD`g1Ou|54+Srx{c=s~#VRt=HK(suoV;}X*m-`ftg9pw1PD}r z0;ANB?CwN!PY8bjU&UYXrJM@(N&?p7t(@39GW?wbUx>V?h^CeeYAZjbjEmMls z7(R<>b4rSgaJ6och;wmTHoG0nw^2l^G(8EJ%S(zyyhNDJD%Zc1 zaY*1=DAP-Il{on2SAR@RbEUEDLqqIik=&gwO>XDk(0#MPsZ&5<)aa zEES!z_7<7pq}V>kFTp!YD12go$Vo~i{Bdy$Uwt((ZrSIbeGC9i^z=f(_^ zd66FGO1JM&4ci7z1b=M{!8)%5ZO*9)O%!2DW@ETA4vE1E*hy81=87;KwIK_4mj80I zkc1RFMl8_j*sGyWh+o5q4MS1o!8fT>gIR#PQCh`Z`CEhX?#dw1Ro32JF2oc1$)j-} zW9UwmZQVWUsnt)y8C(W%%K=n3I&A|nW^JIb!L zUOC%z@l<2!v4(R;+b)-OUcb~+Ue;Gx!Bbe47=bjO1K?|)3`cfi_Kx7l1k9Rjxy!>l z{I&6jg1-X6=;{?`-9$+JMr6QrZE&=%Q%)3rHQ{V&XAxPc)T&F+RyBd46tqxaH-BMn zB5*h(=2sTNTn}xfM(mVSZaVf{hbg~Y`Eyt8mD^=wtT|^M$1pei$nRHv!p0hGvWzlB zLo3kJ090VhrJbc!mbESv4tm0Jurw%1s1FtMj|hWEU<+jYV|=FN+?7vPy?JW;E_3?< zdYilLi zv0dJUSxRaPQm6~bdV%rTkmw8&6T#qEdpj}S(cxIL{jVB%xzEJq1nfhQ?T6)c-E&#~ zQ**z@H{DS)!wyq4Upms>&Da9NnunveL4nm7IVgw8q#=pP_r)Mdg&qn?)M_2cTMDKK ziOJS-JAt~1YS-e3h%0*!|8x2QwKG!9O5V9{C3r4NtI|3{4DGt*-QDTG z`LFvwfBW~1SuspaN+qF<;RC*2L_vCDUTSI+N^oc3fuiVqe{b zs)sl48pm{?Pg}ai99AmU8&~B%5_$)>hqS$=ZWY{p)bP{xA-osckqSuZ)o$FGBZ*(& zZ@Oo!{KVz6dydzgtA-XKxH8QZ$x^ad(x-3Un0mBovg`R(&ntjcu~n2;hP=jeZv=-4 zFcQK1H>|RK_)Lm5s+WHdNX~QL>A~B6@QMuib9U)SybiQ7NtXB-hcLYc*NRMcaM2zY<1K zG!bMrKr#Ix~WENC3h7)4fxkNQ^j|5bxeD~b@t#$ABKiywdy}OZTX02L#wOXwXySLuweX5>%YGz<^cF$>g$ysd^kq=zP;P= z-}WICS!3-(qiutPLU8BcwPx#7x)BI21!Y(d4LjC3YoCiX$2JK_!T@)|vcyVJQ3^>9 ztl_5h0^8A$*D0*jO!D@`xMbZ0c4N%%BW@w>9-H9EZP1F4VU9AXaCIjX%$ES6CiDhr zXW`EBCK(4A)rc@5m&~w@Xp}4^&!9`CuvGgjPiB^xU-15`uNQ7jHn$BXsVPH*Bqq`S zE7`(KXV-&}7}_ZA9GRK)S!`JI?4fr*`p4Zr(|!g1suP-Ou530IB|^Qy(Pg3aoZJ|h z`rB9E)>U1~Pr`zL&fg-cu>!$4Bqwp;&q!Ol9_8v^mLGGavDb1LTEjc#o}7O{0kgB# z9R-tKZ%-`$c<+akpH&v5h^-J);TS;7s9y&DnoGZo1I|m`o#&_d#Z{dB`DMs)Gx9dC zSzCRu{JWbEmPgo@$RI_b^Db8)?imhQ%wa<-5$;{B!e8Ud1(H#AeNAIkF2k!e5>tE++9_RB}l_FQfqX$y|4;CYG(3;mN+06oF8Y<*qtfK4#; z3^&Af!^xhd$rrSZ2R5hLW9~$KmT>l9civQ398HA<{!+FE>nVw{sK$^ezI6{Y*G#W{Qg#=z&E|B`cx4u0_FwF>@giUU^TLxS^N$a`cW}F}1S={` zoL*aNYo-N;5WTY%a~9kw?0apqY?Olupc{My8d!dE=Hf&s2SK;KyZYd4nF_ z7}ByA={Vc+=>(9t4|%);j-`)}qw-W)GeKdi_t6XRWP0iG^i*y6vA1{c6k{%RIOhn+ z+LR)yw`@uceC1B$%^D7TDgLHywV8uV8 z^%;<&1}*fkPnfT!oq=69zQ?WVt1{18!WQ+NP_X_XinFdxYN{$d8uGen1s<6NQXR-@ zU$Da<%>&CwUog=@u%nh9G?R6a?5kmrp|fRxRX#0139I!0UC1f0t|8q+ZeO9F&Uht) zWQ%xO%y36%zQy&=uU>r?by)eb9Kl5@z3FN2}4V#$WGS zliY<>?X6#%EusWHacB&NM}bi`hZ>?xHvF}8v}LNPq!0$9hQEf?T+AAd+B`*M8Mr_Q z3jh2uPxIACeK6>V=il^(a0g=z#vqK+8q;yCR#}hTwwx_WDts$s2h*l26IGQd%?xeLYVs-B zC+)1-X&Deq!~kNk))ezY0i6lXfSnLP!On&q6g4C2`|#JV{r&!9v~8Rp z36K2h4R;O5g=FFZn-m%46~7*8Xy2P%giMqw?T+*uI(9{n%;N0Jf>w85x%R{DyIfkj zNnp1?tFN^7SbY}$E}=e=&R17>JXaSFA(-T4B<(B2)n z>3j11e9WD>sYF?R>Ylumy#>iBFRuCQ-JL(&R}cYwwW5kuGJ}M$b|j3e+8H)Jt6*yB zWl;|*!ioy<=le`al6a4oO{uH7X#Lj`*LS0Hk0);qU2SPNem2z5JK8!t+8LpUHr+op zGk`tpEUmtCI^oDPUH1QC@5=ODUN4(4Uv685S@3sshM+SVqi+%~y79BYA7Vm&dvrOT zsis&=rI;kP4rrjWc!qQt3DVKdzF<>(;7WbZ*^9Mb94BTwf?rSETnP&VTh-NeL6&I| z$(#VOl!NV>0vNo+tTKjBEbrrq*Hzz`Yblf0$r(z&k|R%U~v zz?SA?o%&j3JXj=2Gif~RbK!+6q3bsI%NBz583;CRYe))SC}b!qt-|8fS!J)bD{L3Q zrl2HE`BTJ@}cn4P#+TKxK4sX zE4ga8Aub%&`Rn1Y*6W>Ls5n1MDP(ATX=qe}=HGyhq6En`-1zTPw+&;y-qHRwTxXLaulDS#EH=(BgqL~1DmOfi27in*K6$X_QDMh= zzZ~ruR%5gQll20-zg65(8Pdq3iHXOPlTDSU;J+O;bz$7*TLvN>0p^X4c7?{e!x}+@ z<4D^;sJWYo@Xb}{Lyc|htED5@G`+^fl+hRgx7RiFcY6R+g?adEZBupxY7Kvl)h@?I z2z$3ytM%UbxHKRuN0vX4yr1q{w8qWS8h6&>)j4mrC*fhbt&;CWmOf#`lJMjKoGv?8 z-XnztgVBGz|L|hPv8*ll#*=v#|49`;iwg&%>V-u@QnUqFOrbLoO(oj&mh>0a!WzC? 
zzJuYU@!!?giCsCiHuBQ?&C0h1J^y}Xg>iLn?=H(rNG?nw8=R?z7@8B#&cu%X)MsD( zaOVLZ-|&}D}ow@12E?7)?5!+{6v{W=8K$&<7Obt#V3H8pQL@oO!`k z)g7_#k{q~ARZzm{G4!W?6b^+o7tLqaFV5=7vJthNC(i?yM1tOZspa&?<-Hf%V7}}C zjUVB2H8VUt8<;WhrQyg^JIj8+?FyurD}+~5%~I{PEHp=;*)&%arsGY%mVYJ@{gfS# z3S0ou;oO}=t(yW{`{6mAiRo&L(CA7iGT0cmhPi zhQcIXwr3(6^}| zW22$4^&%Pm!phNEm87Q9d?anZY2?qvtOI*1!k?6o_~KedJh0!0cC>soH)Yso=ws~L zR%{ZIio!8|BJhYtmTbx^(*}c>AeueD6oq@SdBil5l(I8; zlpBiCK(H`hY5pZEtFg*MQlOL4szRq}r;V#Rtn2{|hfxL!tSjp@^^|o9f#X6m;m#h; zdbb6C>8s^U#qMSzLH>kp-eCw>+ZCO_4P>f_ThLr+BX>M70)ZU_KX+P{HSfe zhQhYDnHyktL4#83NbB|J^~=H93j-Hxg4eE&wRX~-1cLLr4*J#0%l0Sk^R9Z^agA2| zhBPG+_*|Kc%FM4Ac%;4@mUI63Q4$)|L9_z$G`Gw@2rDz1E0JAUl|(B_+SK4KV|NW} zKG`AZDlOK}+3r(bYWAZ@CFrmp_lmbUZPqRri!UIxNMU~Hs@kE7{dmxzwz zAOPp|V+M=r@NCNktnf{gK;SJ6Cq;|T5|7t1O*o_^n#BAr# zR(m`D4kJ`MWbfI|Cr944`8C}f3;g5p?e5x|ZK+#m|B=M3pUxwcs9R<7uTot*?!gf{ zGYNGTb4z!orx1A?Uo70R;nC#yPdArt1;d2Z$8DEluNcn~a8CG`0%V~Wd>P-Z{l9yQ zY+k|EWTvPC@z_lhd$LnEKeOh@AKv+K+K(Da{2!5kE6Zg<)<%v@X4W4fARM05*nt;eEHaV5M{Y3(DvQANd#!SBX=th zEI^#=BX;KIXKzhL_r*Wsllc{}p8N)H-os5nUft3-(YVT>%@kX<*)mM6; z7PNhdqw9oHsw?1oYixOCTnr}-SWzburiC`ojg{z4!fq=~URr`Ik1pRFyUEc5aY1d zWTH6LM>&|wm{w&BPW}EZn>RnV=D9VA>1$6O+P^%>7BNPDtH^f;V4zY@l;)SAQB%WG z@`FPe@>+)c-yBudvl@TJAQAjkEVdl7SLYQ3bcm@cET>6!YGf_`I^^x0?CY7f=pC3Q zlu?pB*2K8zlnj*&q2QQ8LbYHDxC@&12FJUHEsTN2bxquGu(cYCt(Qmj)gU$Dl+QX! z*t(F$dppN^I*8Hkb|h5*F?3j>dw|g5EciPV_oc*$lG{$EWuKz2XR5b*vKPYJ7;C)} zx_%*e?cDH{YW!rzTbd_2dS-i_X4`fn@vTBUHDPrD&u7fJjcHO{F`Xc{{yheS>@JfhGb*-`3o62Pc%M!g*yL9GN~ zgajEv8PXZ9S?6zD&ndX3IxcI)y&sJ{nHay(*L~vM|Ew>sj5W8DY9<{1_S0yovOuS!q)fpIh^KY4JC=Z#`JJd24d+-ptJhb1UD@Kl*Or@qgd={!w^kxi>oB z5okYCcmA`ABkvyk`1KFo+57w7mcGVhcGT3!d-miO@5w4G^KaXm!xW@!-u&LY)R)ON z=OpdY*qN6p!E~e?KYm9!iCLSTSs&{Tf4lrplFyn$Fa~Ew(~2ANx?Hq%{pe@3$9OL2 zHvj9XbEc>BH!m;a$dh~pkEfRr;n#do*>SFpeIPYokQez1N5me0uUK7q6BZ!9=4}P4 z8nDU@wMWx5@)Gz95L+eJUH{sPWV0A?6K>|EM@v$pKaPNW2|GkdF!nHI@`L>3J`3A-{k?euN5>kvVphB&ZgZrGf=C263( zpWlVsj+jxq>CU& zjXF9bX-&$OHSfHcH_%mmb8>uQc+k{A4on z$b2P3SjU1b7p<6xRSQ{S?uS&`q(Y`>5%m5@`;Yc9L3<^kt#gNIwv`Qo&KG8gA=3;e z1|%l?yRG3jj=!T_U1QzduHvj(EZiowOx#OHFl3qE;>96$9Lmb$UBiI*WN$Mu(RD4> zb}7=|3RnZL4e(q=a=vmufLhqF!uDBrJ)2@djY(uhP;gG>kZW)X_1ott)~h-no!)|RKR zyP5@?!b&ru@C6~ZHi119hO?Mv*jR)8xrfeDLALs-u?#~iR%Th8SK}{}eW1finv4}X zoOK?L+U4rlr2vZM*;XG{V;T61SqCNHuO_D+j*K0D=a03A%g379kv)$w)4VG{FMvq* z2W5AvF(z9_f#AWB_P$Vams)ew2Cp~r>6XJmg24we`#`-MS$5()u_ z@`&HBCGQ34ZdPvu$5!+)@u>|ETOC0E)P;jngq%zmy;zP&F;@w)n1)$Sz;*(5hCK!< zx-9Dgb~SPRTkh1P@%UeE+#`Yf?w(hXi(rXhJlFDmo{JPljxJ0QE#q|k`Ma~XCp@?D z?B}Qcdgm|p{Q~{EAj@uq=hv688mOq3ZLOw|eQ#>vf4=_9rSeLfyF^=%B!~Ts9+pXn znVUBIwj@RShW^)gfB*i$H;-<7748hxS2mvc>|FV~hd+M#{r@Z~dmWOSf-zUtYx$)_ zS+1X50sW-ZeD@U4=i=iYVsCC%Sq>8kQ#6wIW+y>}C2}+N=4U$h>6|S14oUB}i|7C6 zo3Gj3v5jGnqA8tFb`g%{iD=F1W!8==b8-4vd&!0A;Yb zzxixke_dy&WniQu0RQEZ6G7|{nP)c@6fb3abt_3rsz>qHj+RgMBvm?BT8%Y~;o202 zThW!35!|F#MeG)!%wxO;r6qgh?NKGj6pl%}Q@0I{ku#Jfw{s}c)ID^qvHQZ+hKfqG z(2Nhm0MQ7gY~MHq4!t26 zW-D`;Pcoutq|qcss2y0Rk{z)P-}wA9IqAtG!~KtLu1tj@mKC(RnAOy5&9N@a#bNm; z{bqjl-ss4sBcHuYNhQz!T1jD9(Y76Vx%fF3ru*_!GU*j6_T@@siuH%7ufbnQI~($1 zW+jnMU=Kovk=2D6)<6k3Cu38>hW{wa^Bp_%2|nkyrsqdGdq=yO|}onS7NU-Px#Hu#0v6u8FYse2j$SyV*g4bRG~QJgYd$}G?d0H<(}CJ* zQ$$m17DH(kd?E5O1=ujx5ZR8lS8?aTI-my-nagM4Gs4cKC0~sty~4fJ4g_$W5v9}n zSvJEe2=1uJI~SUm4bqe}DGf><3S-%^8h?$$u0GltYp80B*mWa1zAQCIq{4)NlthfH z%R^ov$d;Y9o|F}x7)yVUM3jGFNs3BnkF9ZA%RGA+Es9?rrbpRnn}Wq5nzk{wQx%(?@%`zqB!DgNIFsXAsktYyxg)BWO)M+VS9%*RRJt!efy`JId?iI z1Z`G|=Qf21>kh<@d=TLm8g>qC4+F=ivdkHSfE(r-{@QO{R^M{_nz85cJTs4!2Lz|S znZ5IWJ^b#Aw?6Q#TSrL?1`7D@@@*x|1xqE+={5_=X6Z7~p4gFDy6Ksh4*%ivpYHsH 
zH^8ZI=&v<*-VA7^5I@FI!tGcFA5KR9zwaOR*Pcb_f{u$+@b+vOzfKfp`Lk2gHY8`f z`QAtS4_ChNhkgIK_x)Fk-rSq}`VL=NX=+hXQUUaYmMCd2%JhNP_;DerViGzfMMTan zax%8(I2D%Vp8;PQlys>>o z!)RBPRwaOyAmcd)fnco{8%|;GEGEw(Y9e!>XQVaITzy>}CR>I_+C_VSUjd#HV$HOw z?2G2goH~S)s+*QMml_u~-b&6{OS)1B_U@6}7M634^IvY*J%BG^w~ZsOM3)Gjfwu?T z^O4!E3v*FN=+O0!{ws})39CI`*>dgz8o&FKczwu)#J0-t*F#~+wA=rkU8{z_tRyhg z+tW*TXK$V@uVgM#0n9O*aSS=c)D6%4 z^7HpT{$}Mq0yEwxVaRJo^Y^(_4BRkFMB z%q1=fOyLCDv|cIJ6KJ25{ZUb6-M0EK>o!s=4|xS)ZLPMWE#q{R5-Q-4ZWQp><#o-? z2^5e-bXivSw0g0Jk194Mx2z>y;Ko?9Ln7@k@+42vflK*S zgi_sov&R?m#VEy9nCB9u54X3@D*tjZ)u>PoV@X0>@~)Edq#SC zDfRMfAUn<*a9d2+2=d!xUmE=y|gf2-if>!b?T5LznXP$G@qE&+v5BTm}z?pJtw|d(<&p<;4(N(NrUK z7&drf)`Q8dsrEXzK$n{`1hFcr)n`XZ6icm^FfQyJ1#>+t7Cn+p@Ub}p{7OPq$E%%u z439trZa68jdO6bI^KfS7rbmH;H zes6B+?_YoW$sgX^^vmbAWePBn>oX;c@eyVQ;a$RN) z_zI#`Ca*|OF8IsTIq_%C*p|I9-TzWjegfb~-zD5cNDv(>yYl_JVYme(kTXjBGMmYy z&b%u(Pc|o6AUG!tw_adfbk4cS8Y#Q8lXhh@)R^Kg_b2bp+p;5@kvb5u&EW6;-@WyJ zfBn;)@p-YMVWfg*@63uSLN)f5#(!01#Hov4{Kk1T;jhLrD1!7q!aZJ9-{kq{!{c*d z8P6AJ8M<0|xvw7i-w@ir@t%m71>wV>J%*;a!W@Zlz@R_K>#3$OnRFu+yo&9jIBA%B zhVo*^d>#0T&xRL*wx)BoJo7(w9vN`UQ6~~o?h&dEwHy{Xgb1-(dIsy;`YtsvChXj& z2L>)*eLOvLZxoL;`Iodyld+-8aU}UOAo%i_2i>B-cNcliSfSMvx^Ae z3=JN=ND?`li?bPzwT>vw+OXaK5}E4K%*}R9(wU1i+P@`X)3eWA`106K_Z}>UkTeGt z0#QPed%>9LTNXy9pWJ+KXYs+Qqt!)urFoPjit>nE`92wi;E>etR8q~{S)7#&F-LP% zl>QP?f(kl)eW`DKNyfV3^cRVe^o{gM8AH$ar}$Df{c8Q1m$p1#`Pql)_a046Bf*{O z8WQKFRoU10BzI1xP}hNoH5{$302=4@YKYR2B#G$ZQPYyy3hz8272^yJ)c(;m*qX`< zCMbfae}}(HNJWFan(G;R6<8#6K9Psc)O1WPHw%TXxK0={uU`&0Zi@U>d7+a>;sBZrr2mYu4H zxp8?vHw=GOTN{ophNIeiBpgtKba#w$#$qUPgI~#S5r9d%lOVEJ9ai8F^H|V9g0`9w zW*gC;NLtl+ndC(cQw)`?tJkvAQWO@4LkY+1Od)K}8;Y~i@Ee;pk7&zUTh10Fx<57b zzaHFgt3Gq^&Huz+k3xXpZ^y8R=(IksFVI|IkN65?G7Ywdvx(^ zCDT#!GbJ|*D>R+69o@Q@kr>ybieY}bzs$GI|Kir%4O_8y`{6d`jkuJ=Y%UeAcxQ6r z!R*ZAg&W^2-}>?H!CnVO(jg%G@9$zA(XB=mWBPA18^3S(~K9)ilRpSpG; zMQv__$h&hu-i^C+5{RTBuhwM5BPqoA^)osopS91KSj^D8F zg*v!4Ps!u?`nVJD1%jz8FnFu}%*FQ04Z-GK?46DI((3CB74fX2rs=udIR2{!)No#- zy8(`M1KuoOlYvi`X*9=P$U*4P@kav6f3q)z!uS!zaN=e$cvBU>D zug^&MRnmrMU)x<=ck#^4$?3Uh40HgnSjk1`t2i4!a)UR?(Ui2{tY<*&Obh84jeB01 zp2xV1&C}t#U}+s6?9u3*80s_qm7DIT?is*3kZ0&=$jEI$K>e*yIbGuTku?oi0ezkV zvFDDZ(|(<5dZIThvMbLe17~tHt2ieR3?w2044a1Fvpn)VPYb4=m!rL$g0JXm2zw_; zYgFti-A1c3%pyVT4v%&X5vIAa-%fS}G>{?;j(7Hf`%rHU^wQl{GM?(~l2Bf3sxW)! 
zXv|1x$C|E$>MjglJT+8vGFW?Ytfih5H@`CeuG_2wTuXt3P-u&^#z-uhwKnskFYELDtVN&q^)UI<#d>9bxD>u z*ek1nqlcigR5|15CZ$Y~EeUE9=752VBN2xHP(YvucgJzL6a2k5Ir)#fcls`0`sB5@ zI;*eD^~M-DPR}prnG2N**m7A~q!Uwjl#0|@M2gTGjCBkE)+0?lp{p%jRhL88uHTPP zR>l!4;xfjnOhy*Pn&6pP$vhyB*Si)WmTqJ2hL{GImethdovtgxWy=OQb(SUJZP{n# zcl_EWESPtPv(Iv`$Wyu3)ftGKqWJP?=h>~9`d9z+=f|U69U1G^7A9@M6`3f_WKNL3 zBr~@tBU{R0LO*p9E5lebvCYrW$jjcCocN0^BVCbyJo?+ix%-%A{BY~ff4ln~@zd=; zlP+Vqt}CxPz5nx%UVGzr+h4}$GV%HKo1cBbx9KJL4ng*#`H!A_-6sNgGkY`cF?R-%M|OIdb88RF`!WW?LJ zYjT^wOGFav{-PaO+mc>L&P&e!YW^NBSaz*)W$F#vTS}tCz8%0t&ZIZPNVlm^_oc>p z-Gtu=ia5C#s3x;nWs}xZd+mDlnZCM)U`rcC4S1z3T&&5Iha4LUiNiB&Q4-fW_rWsH zrXn!)yFg@Y#%m*zRI}l)-D9~ROg5-86Ws9dS0*pOf;D6ck{V&j$OcI}yP*tfP&tOb ztK{2dVL=qp8-jz#T*9VqOq{M)oH>8sNLy7kLmBQy$8LvXRzx{-gZN3`&xJ{r(65HXyfQ3xV8KhnGpF2jC_TrOCOfTrb1u<~YX}?w( zG;U=|tuZR6UW^Y7jSadvL{d~$`6x%jSr2VRM&$+RW!KWn+cSPD$5psYuGm5b_*#dK zV}b$pU=U%(|Haxmu?Gi#DZ+yOVmht*>apG^0enknwN#$OdqTk1-W$1Y!(Y0IDebxR za#xTX<9HX&GeIxmEShItR7%WGM*#Q5I{FDp&oL#)pfL6t{}f=T!nC}@xxS9c&gSX1 znvwd`;Y*dFixoqcs-yLnVr`Any&dGJr~A5R`g>7Lk9T&Bb@0(TxMApTIqIq25n`rC zL-Cid)k`mrc*yH22nN3Kqt~7E)L#V@mK2BQ3#||{LdMnO#p1tlV+dC_WSle&&D{tr zNVpC=S!Z}fkMP<8B|mbfIeU0698&EyG&imtCkWiI+^HR{W@=7BENd3>FRZq@R8FCd zDz9S9A86}zM}}XkpxL1?h$EC){VPCQ!mwDumuU!51_A$g^VV2<$Dz002N%ZMgJW&d z|4R;#OHux7EUO=sQDs&|LI`RQ%x=)J;HDl76WXgUM4Q^IvslP4rk7(gaCum{sE^3O zYBH7^bmfRlOY5y9|KsS%yY-fIv6zVS1(E|&nbk_S?J*2|tqN>eXotU$Ur)E@VYlNT zdM4#;ST;Ze_TelQ!*@$2m^JjW;EkH@CZ!)|-6L}0l zFE9p=nZzzX%BAG=7uR2{IH?d5qkXlm;_~NzI`RJR4!rWphozLAOZrW=5STL%OmHLYEy>WjOtInO!NzvUHkW3n zT6k04CDlyAS+(4vrHY(F7u-(?6Ryj{Z{v-n*VV-Sj}2n2HVu&oPR(W;Z66|>sL;lLwyuz?Fo1Eo9fRr zR|sPHmt;Q-J+u^um&hgWA&qrZjZK1OJ3TC|B%IrZ;O~vtVmvH64YnG*a-=~?;Z@Ym zR{8bNRuK80fe!mhkss|>qeFwl80Z$qwZS2Eo*c$UVyrL7z>a^5zf$OQ zwP2)HZc>AledSku^!y4;Ta^~;3B}(56>H9Z~>lCB)wK;K^1tM*EVkx))TbKSfavT9RSL$0M@-3{kOjwKKYO(1AZ zvKt(&UQ9`vnC$GA@@_}}T+i_Qz~JK0@N{SMXmj1f^()b<=R-ATf)}cSH5Wn`NnqB_ zbhIuFcFy;<&h@uWbT&krYKd5D%V=9GgNDg|gTnIadVbt4MfF$n!6-RWKvOD}tGKOw zM^;(j{-@we%aT+xCD1=h&_o;@yE{RX{(8* zY#)cqPaGa(0%q<${2jei=xYULfON({sfOE7M0<{?!nC_5izb3MFI!Q}On6Mn7!0 zR&oA^+fO)Ad=Vl8R*OgxMQB2pCLqGRm#OIkF;=6o>)-tsC{l|Rm_(TE*@KBmGIdR7 zE;LuybX{u#d`EjCFa^_I>fjr3cy407FG*H{!Do_M)$6( z4qIRF^4Jo=oCV_M#KQ8pXS*>z&yOn;i;VDd!2={g?SL5kb$M*<*Jf)iiBnh-iNV&+ zV13izrHgH+PSzee)LK!oJUDni8oe815Vjbc!r9d{Y}V<<)hBZ~!eC__(rzpT7p*y( zE1c{0ANRkHwFh={eQjVIRq4*G60*cNLff9ostME)aHoW#5@w2J^BQbMQh_fi;l<}N zH?K!S`~98UU(L@wT6%o-^tD&Fzh08P{k4+adkeRh6&CHt$>V9s;UHtsja-rp^Uc^o zP^3`6HMEr_kc zBGTS1f>qTp1NxX_I6iAxW%YG*f2*o##{8|uUrP&%unO|qgM3=uuFzC>3&L6m164c~ z3ABK(+)u`s80&0{hyK_;*3k_gmy|dT8|`gl%}tT|n!wew{WT}rPJPjK^rOz>`+Lgw z^&a2fS9xHl>QL}}dFaCN=+%l??a84Fhq_OH(pmLc-}ys>S1JOvrvmlo!i_adbBJ|Z z7jiAzDoLB${=~FobRm&!;Y0D!09le(?5VV5AVm-uTs$lV?J!sK)r?*xaXlM~-kV)u zfOXa9Uow0=)-(L|&HJ~;=P1yii)J4#A!A14#xhUy&klUmlE^d_Ru_&7E%e4yFCbgH zY^5@d%4+jRl{0Tc1|5>n|lcF#YEtU^OCM*(rIxUn9xR$Jv4fR3ZQduGL7L+S_9OF!Lx zvj2^DGuI{TNXo(JzRZ`oI~|TG*k{1^G24w#!#%C+_gmq%0*}>zTs3VNcdp63(J?rj=N#z0$?(^^j65h7QZ*K?pu! 
z-NC#rO;x-7d0YPD`4b;~{@1&LzkKOzatJc0;O`BcH^jVq0w^K42UnTZ8VP}E3FXUL zf4MU{a<ewtt}i>b1tI^Zm7rVcF~rPxOzOsr%HR^Vy(BRq8>C?;LfiFW|f2X`aQ; z%8I`r<0`Gx)9FR77YP?=HU{d8<#NvdQvanK4D8;X1bpY>32JqOmJTKq(KN(4qjhhO zK_ZlOhH3)B(MV%E6}0|~)omxs8_N%mHZ=+U>f{lRvx^Fgt&4 z`*7;ck1O9^J$`AIZ~LCC9RRW6FJHZeXt9d@2}M~7i#}?#g2E0H0ZPK3nz{9*=bwH4 z(C3HltUMm zRlC%9AMvUPu2wl;89G}JDJogkC5NiYRXxT!g0zX$F637zaQ_h4M}V-RyVfZ`9ty+L zwUpKu(ei7Mzlw+HqYTqQ8BHJ$3Yu#cny(&_4B3xpb04kGea5O3=fs*D+H@ymkP;9CDA}9*GYSuBrBR40#`2~ZjaPm zTtmpn7P74~ZP z@FRE%xJxrsRCHG6%E)ms61M$};j*0>%l6wjxBIOUM^>L8KmXs4p41#U?%%LwXL2r4 zmXWh7H488WQpxu+C!LV2ulg!8-2m1pv^3kHGZqSHqW0z_Qfb|twV5dMCjp4&mqpc8 zZa_|{^ZHRNr|rV~LLt>qRbVkY15md4upZ;ikXZ?NETQyTO7)q59F^r`+MK#8Ct+vy z77E0~UVlPa)|S1QTg!Zjdwhvy+@QQm5<5$b0k>oCkR!HN7;Ug~fLyZ&D9g#9HFytG zkg~FN`XoKR<(Xf-|H`}cQh};mx?Yt;_D-flwqf_ie{EEOhre3?bvbI;vmphSk(c9> zq5A7}$IrA?UmkAk#B6+|n|Zzw^OB{mpOMP5^3S-se9+wu1JEaejK9%+9wyyjKVcioIGLt2fuveUpBfMG6B zb@~Mm|3@3R=C2N3KG$4%wD!p1>5g_BIgs;lMa9FeZOdr6J)w1JX+@ZQJHfm9cKQC- zHy?iRn|J8IWiA8c6p9+JJ3vKEX-rX=kf8?YX*QREE+O173Nn%jAUOG@wfp|?#=E~Q zd$}m<*E{lF-v-Y?22kK+_3D z8oV%feP-HoFRfei_V2S7IPKF*P2--NRb!5Ntp6kAy~jy>c3jg zU5&h>9nG=!rtuDB%x!o}jx@E!8k$Dx8;5JocAu{3IQ4nw>4P0-4zyQ&)^U1&+sThR zkMC`pFeffLc=RY61c4F}A@xd!62QF83 zoj(((yJC%96a6@t#U_Vi6GJ28gWzx2irPro-i<-*!TqyOnMffxbv zoePhAwX`@t5_xr3@r%!`S-rk`mtwUZ@ zJKJirYPOMwGJ-7TGZ5Ew=^q+{mhu)QT}2|>C##j8cHG#1E64&2c^Dzg&hV53mli|g zC188l1Sw_0`jQNHcn0rD-1B&dEcf3TiT&lqQq7?+%isSHJqf}Rm7$`&7xx3>zbZ&` ziz<5t-alu6(k*9$v&uk8t~Zj3a!ItdN3J&wUB>JC@ZS72(B^*xw#KZn0y@jC{edX@{}zh zlGLmu0lv(wd$ADoCG7Pj6J>Z}0KRCLkymL>mz=iJy4>gXN8RbKypQS7h{mezYO%Vf-VF}h@NV`SM2 zfnXBCE~Ok}ZlKNzjL9Sg=Rs<-1xL_O5jP2^uyXn9V&RAJ0 z|Mfg!VAJq@n7&L~AO(sF6h}%L=OYbmq1tPmXR59q`l9i8`EoEs>&>0WwA5J?g>8>* zdq2DOER*0lib!IC|JHjGvp+q06zJ$EEy*n`$SlZC-Ihz1IGN5etN#{eV1JQdbBM@4 z+gKuTrt+l_1sUl?DFcOb3U_1|l5O6bTf|IBiY8pKOdyg_?6wYH^II!Q-9VscM%9eI zOCp`=OEP!KMlVqpF3bSmll{K52cwQ}gwv;kQigcECUWL(s^#sgU;c@Ua+K|oL*(MH53=L@X#WmH|&+FB2vDXll6vzwz<=cnA z!a=m;-DhZP$P2t0!ix0TCN9YafCn>%IKq{K#pO(Qc)BY@){&U*93t$tWfDMJDgO5K zskVwzFBVvW$Uw0CO1qHkkt5CB(WWj2D@2-_!*y4K*DjNA9=udV&bjB@v7QS@duxvM zUO3u)?nw9PFFH?t-gW%*)+76y4}aKpbYJJG&j!vP8N7UK=t}v(<%4ZiA6`59`_6M8 z2Wra+ETf5@i#5Y_wXj$A4`c>(!%Ut*$gfr2DUo~lD_a^+LxSdgriD63Xwh6z%_R7L zC+NchfB7o!PEP-L@9w4J<%!QdpSfXUK}u?gFEckiBVpb8SGMozyx#ibS2yo3jNOb3 z-v|zv2L9(vR)ERzVii-xn_@s!GM^&dbls7{1ss>uyqu)+JU1a*%`C&lnx^Z(w90fv z_q!T@4T4RH#L<8lNOjm`Ja;uhX?GHkWEEJ^K51VPE?5BB?Qr1qz^G}4e<#tW*~y%J|ZQn}$dbJ{2e zA;QiNUw*rKw_|@rk(vs%y_Ow0ng4zN8~%NErshzwEyEr#Jyif#HBMGJm4a`Ulv$O- zY9SctB#oSxMF7NvO()#r%OuEPlXEU(YEn9y8MW|bGT@d=tK9VtDeg8`CR8*;S@OH z`G33e9Sc^4Kkk}n{4mi%>fBU)x%XOgps5dgE10kN;iDxc0Tq}eXz$gSli3Q98z8S? 
zt_%~@d4g)SAtl|_IcI4dk~v4x+iNPU_=I2XG!|@GW$#FmdG6*IjYzY&j1q?0uwzwl zJ)Kl&_=`FV3hYwc+F5M-LpvL@0~2J57}7M-JO~8$)zr3BoT@u=th4I;gXs8!(RssP zU^lKV;DYC}*Nb0`u$2Gm_FFUzY;x|8E&lz^*Y%Y(g$bEtA@=4JBl8n879cn?i3te# z(ubQs6C76{_z3vQ_yb>-L`F`rFPGZ!j?CiHjAG^+Qpzv&6~L4wRNG9x8YOcaxKUCS zr6mwBTr|&Qn@fC~Tuxg%g7bW-`I%{%*%`@cTi5(z&6>4qe)IC~$WY(Ixmj$is8@n1 z{HMU_$gFDBZ=%;Tkki3sXxs1HUz+NyrsFlc4+P*`qo zkAxat0j4Z*^xl%=CDRK_#ad);VAvd+jpthbjyt+phu&}pyuZ|ai+?NW;=x|EYh+Af?#gX`7b#VUFXU>&mQZpI@)>aNLS_Iw(`#b-_|cb?kL|kQ1wON zN_n`pV(99Tj_Uo5l^?cOe;RH&A8o0LG+hr}ukXEF$9j$&7Ya|Ffqz>~@-xyx6mFyi zpupg%hD;|k~99R7~jgQtf_Ko;8N*i7I4^yIxo8&~Yud;un1@8YXT=LJ~9tg_s2( z{*a&;HD!Zr(_h^UMZce09Jp|y>Vppgwe?H{My$?;Rt827$ec}x$?CkKL9km!Oz9d# z6e#W|ZtTP(DAdpn`6c}b{z~yTEZvcO7AebWv0Fe;Lt37tkwomU z0AfkgX;1XZD1F3Yok4()x{JU=Pd+1`&SA03*o!AN(rX)~ByGrANf+1^7eyX~Oz@^4k64$PKH_I$I#MTedwFS3J zn8!+F78Lsgh=JoBe(*Qn@E55SLDHKhBUkG6)7UFxn|NAt3&H6ZeCMz14|itgIb7B$ zg!Ns|J;rYy2MfecAysH--fW?Z*+zs-DH8KoZ7Uw`sk zZAEqewT@6ze++voMhf(|XKaoj!%W`vwn*^I zJda0{^>tOx&!sC|Y+#&5jLFV05FBmlz?8D<+_|R8%Bx3?MjM;&MQ3gWsE1zjOQA5&=>pq#FTEva!ENjEDH1Ie@$gw2HY z8G-DC^WsqdD=e+@)6-Iup4+rxO>X`#PF1~ob7twrtP&>4G5`YP;w79fhq+Li-!dJ?I4wxRIcPOs8hGKkgj90% z##mE+S(dSayIYPj)IxV|P>dp>xrdiY8eP}_H*qW{uKqUT~|cTGiaO-0Xz6MYws z3-dj9y!&i<@7be0)n5|bXO1}hJ$0nL;)~AmgH2!T>p1#xZ^dVkOGn3QD~2zX_n$uk zQu7R<#;fCjfp(&wcsR z!}&)ymTtsGXM)pnVcV|jNCo{Tl7A%`DJ~X0suY#Q%BrQFUx8|?NJncbkA;gHELN5s zb$zlr*@p#<|5?*Bd&E}tLdqW<@~Yc2?1OM#7K&dt$C(BTRAK($VXkw(lphUWFn^X2 z43W{5aO`ey;xCj{Ya1*7_#qp3l7LKn5^R#fMh@!E^?LJ=<#vnHsTnPlo%i+-h__B+z#4nhL8rhI>HTi|1!}_gy z&dd~HvWmZ!CAOnGZ>~F75LSfEq64>vVoAu`BTZ-x4bhMeVqS~KTb5n{UF6wDe&t4R z;)fge|L4Jv?{5E1-iGv@DFw_NM;RsK(dj%g|0_Q|KIuXvRR=|(*4VCTv?%ZHVR^Lf z-BqeBIE&Dh4D2iE_d#mqx6WLuz*a#vSrfS%fCBxAs!)sv>GBK8pik>FGEG;!=1alXP!;%t^BVq|XPT*zccWma)gn}yS4h z5KJ1ue#MQy-Fe8`{@$y*(dOB%iR}~Oc3Yixq*BwTkdd`B&BxaSO~xb*tF)r@40?iG zcG@RqXtc1=ri4Bp4^>JG+>phMP~jGE97kksaA~nIBduMMmO(JHa$8z@Uizj@YuCK+ z%$j$8v#YP6?cw|_{#(UjLfsVk_$;uNQ$=KD{X|l+)e0|52dP3zx(2cw=``VcpB)Yp zps)b3;xA|`F)VI`RJE)GY=SFMVL2RkLkYRZd>)`<5;}d}9eYW^!G8f?=JfR=fkuL* zDy-Oe--tBiip8<|X{f`qhqE$z-i5MVyWbqN;(XS~FSOLkvYM~+&j~0C>6V@*^X1^0F}n}x@a@EM0Z!dX&>uw1bk-8H9apB?E={&y7;il{(sU}^ zP!X&OoFaBw2f zzc3p9=D~bRZPk_+o<+A`>|=g(GUK#tv?Vt%7%xam@g=2h{MEXY#Kd>sdTY4j^8Llg zH%p64k&wAkxM~-T655m@D6BhM$M1q7Rq{d{8m*3IN5fyMIXc#6%4gXy({S{(F6?Np zG}2N^`6jdtL_p*_M1c(S8wVQ>h8AcaM`el%S?!n3(f4a;g878^fxm^%{TY_oIF{e$t}r*LVt zV2Zr(dP7xCozZh}!Gd*<6K+hzo|zM+bX(l9R%f1#Uo(#C$@Jp?`R4D3e*Zz{uQro~ zHXkZU17@X{WG4ZO+6t7kD=$rk_bEGeJlE&ni9LCRyK@Vq)kykGi%NXkigR`pWp7VS-?lY5@0Xji zpWWbF^FsRXJ}m$4&etS$Eqi0vs>{KuYAAk^<2N|@_a~M>)zD?;39I zM^~Ur5JP7oq@GA6QT^1At*obW7R{AaVHQW`G#2RO_0U$>DCCq4fMuHvAGy;5Um*A? 
z{@xi~xE-5!F{Z_0>p$|4T65I%cg7dNXuwwpGK_dk!t`1m{)!^YxyGp%7S}u%JQ_|D ze3JHwwGW9SYi&nwO=J71^H;wpZ$5GQZYcH@`sx5HfJi1>sCx6C_%B;`H?dKb6}*%0 zZr#6Db!tz3?kmOH*k1vl(&m;-cztjz_N5Tnnb~Q{Y0tf|VM|KJm;1h$42&X8 zgz%d8k*RC`%~4sfai6$!iW^qRvP797uRw4BhaX9u8D9S!e}y%h{uxH>Nb#VEqAdXB zI5UNuy6D4Yk=&gOdNAd)Wskj7vl?#vi~p6=Sq(}X5kbz``jQMiMK~=XTaKp?+(jc& zkT*{bhb<*6o8TVpCz7=dh9{2b}WcvT?2ULIQ-STx=7mkM=cY= zcS0Fsi+JGv($qw#`**LD;2V>%@kPCWQw@Bj$vvG?h-7=vJkTGv zb<4WvpIx(kd+w=IM{do=9^a-RBC-$;(P1G)Ud3N8^DNmU`w#Wh!iKzZv|4KXDBvZD zR>$>Z+)TV)shk2lVnGwwRcl!}I{a0Uvob!=P(VjLvaGgijki{5HGXP)Z*sp9QL%+I z+`%s38yfdoJ5Ig-Vaw^WGd%-#EwNFY8WdHGIw4>PhA>PzK&>|Ic_6r{uYj7Qi*-?w z1_9WOs00}nGl;nds(`_3SE=BmkzNW6KbV;1<5=UZ@mzyTWBtZbO?zga!eOHBJ!~Ih zd1>o1*3>kcjTjH6wTe$i9b9Q|T(YYT)bhcf?pxTjkc78BXfK~&1$4uo7jmU@QEZFtlYFdvVrAY?;NGc~y?ua@W?KL7;U{h=F$}QQQSGv34wH>)HQ&rB$ELm4j^dA{niEFoR{>7H8 zO{s6}c(r{0XEnzwFI801Rq^MWw{OIp|1;kd7bH6wyPP;0>^fu#PXl>Rph(fl%jH#< zj-75jUpL&?9c=55b`6gWM8>I548>4inA*y=tnR3NzJ|Ov=*M{)(m>=YuoyC{GnJsQ z5?EuV0*(}0mD{e2OOr2fjLF98U?L%b`No~`B@1)pu*dK#y&;{yAiD!!b*kirTsMAzW`|)17pQF;sJUAI<%#MKKk7Yu?(4~!dvv~&h=v90 z6hg~0ujaqnI_(?=910JQeLHs>Z}!9Qyt_RWk1`(`bxodNNh%>y1V0WcGOSQ=6C`E5 zLb{x>Zkul-0SZ$u21bS9W+YmYnHw-){>l?p;W@ck?X3iV32GKRmCbs{O39Q0Y7>Jyz?yDA_w1Mw^@E z>PH%S{!@-xK~s{_eCd2NigaR{+C732z|>G=lF406_H^Os$XLTE?4d`RHKH ziT?BDAg_S$xnq)EK66Z>s#4=r_vw?JRTYFo;i}^ur_0+<9|eCM@>cF^t=!jg;*+NG zeT_#yZ7x61QS~LMY<$=UE}!VDIn{shGzH~g{pDDDYqYf)@=J`gQ1b2vb_IFchv4k! z&}0&X&Rev)wrwzGdt*|G&@N!ZUnFV-{i?)NV2HRqGxhMs!ntFI{Rtb>*S%PfwAHdM zMQK}^!HOVXH2J=luvW9qbasiXz*z}v(q3Bgr?)dl182UzeTyfNF)mSuzpNz6f5pDi zc}_a#XACEu=cJdBb_GzuUv*(}ol1c3NR&jMISz>tmTHO|ZmvRJ0bE-Q6@rb8Y6@2} zw_b?!%l_+4B;zp1gK}^X)5ve8=EqxFD&Kpr?)Zr*WX#|ujcM#>Zk1x#A#4sL1+H#7vVT^qh~WxhAC5*%F)j7nX26=ITn zlckBxhWcX&rn{Uo`<5{v)dXe&ug=XwD%ys>N`7(RG&Q&&sR0D&AScQ*pVZSLaLh{x zEmA*Eh3pB=O#H9xoc}-H{?v1=5gAfm0$oDsY8WM=Wf4uealo!_Yk0?ch|GyZv{hHB ztJ+*bG){!6nn_tp`Z}BL%jD7=30i^pa{wI_<~GeiV*V@v=x-IE&q)O66-E>2@HITO zqv5r6C(*Jbp{|u`3oK5RrCG8dW0;Q&duQ6_!o+8DHm=FsERmn&E6K>+k-P1+?R$Q| z=Z#YbD|#Aw1MR_Be|T|p_Uk+M9^6>IGd)8&gX>esuPU$Xs`Kg&O6XSg;@xHH({L=% zaJ{wq?8T!;d#+v`YG`3XR)hw|-hi|!3_0eDBQasQqd5KozJMJ$VK1c5*Crf!lZkGK z;7ZcPUW_ovW8T!uq?gfF847#I3#z&>gf+~C@ZOyOh%HU+e6c(z7qr#e^VS6{;H$XL zOLK$wL+deJQS;$>q+*PRAcZbfY~*B&himH{Y3vAJtL-~?zWMm^`Xfgob=U6%BX?*o z5(uV6ojNbB<7$T$k{b?ialtZ&Jy+}DzhM6F_aBdT_Pv~&jV?tJ(f%~ctL&opjqoL4 zL0Rgv)*E~gC$gfX>+saLt+8DyR;$66+g=_L$d(Z~Cc%0N!HTND zF4*evUz;;1?!g|_m0xLtk_e#*ek2@{I*k*w*Ew#H4wX>7F^`cL8We+QZVV1%9K=0f zt0tM{k=9;h`e%NGtFrn@=<}$#1jf!fJy7emq;V*8a`lqrG5j6xqG!{^M0Z4j#JJXi zL>LNl>^Oa* z^VG5S$}ih0zGyvh5az4#so?Li{q;vaYODM#c%?E>S4qzi-M&M$=K}S0;l}IMP0YV= zsJVk95v9MFPUH^K=ArR!hSJ7lWTA9YOW+trd6yKZgwC$z0&r zapv-Wee?I_i5pCEA)MO)+6-%$m7}^4SZZ1)D0$T%B@UhjNfvzPO5v=Zl;{bSGc1VX zsiaR;N2DcY9S7sh?{oE2%?w*z*z_Lkw5@2XW#XqABtsma@bSd_TyOuGeV^1P2Ip3lASe+D1$kV>1NW6)f~s4C8X(yf+{Zb}tYdU^?V5`J7lE!#g^n z?a1S=4_v+6d;T1mmK**3w}yvTLXq2H5oHnPPsm4xoSM@JpF)aa>ipo2x*;)KhNU9% zF*i$Cvz5y^vT#}IK29<0Z;e?y1c}Z{a#Hb^_be58%UY{b2PIAlo&WXZM>M)6skzx3 zQ(w-_#i2sXH=OJl^QaW5*s9LiX8i5SODQW%=P0v;Y%Wr^G!vOFt277hoKT#*F&?FP zNjnNsm=Y*Jtg{k$LU~U0^ZyR zsWgYiUR1J)MM>$!$r;;y68^L`zT`EDFDAeE%!afLzSl~AbKt#CtB+OV(T!=3z@g+?r5xKDALe5bfvlf{Kfhs z$68KQ%y)I;3r(VFg>l$o7c*}G@;XMWXOdH@LPzJV2PAP#{&4GY-O2OFsJ11iVt2+e zMY2T!R9d-#QAn%kmer-%bW=>@m9e}!4UiiJX=&CyRNzY_FtMb1VmWF%0?-6nuhLY1 zQA%dY+6`;htYMs1SAE;k_&fr2+!O^$$UJKXRmd!*y#Wzfv6_ic${9jRW_T6Jb?EGI zd_VzNQ^+q6iBM@914(N@j+2-3UCpXbaQQL6MkkPqLsqM4F zP1QabBldW)Gko^UCmqX(|0u|79?nkch~RV-tftehEGbb zf4UtC-1yn9hEH}~|8#fD7q7G(ey#1;o86Uf_f&r}SaYzi<`~*$T8cWW4|P->?mTmZ 
zys?C$Er3gOos#H0dAy^tT*3DU;M;Qi^XB9GEgFw~Z1~%FVm}(`{;Q;&PxfC!DoI=p zH8q(3Gec=ZjcuXpo#DnVr2eQ4u_;D&XtNj$f6cK3@D3S z;owZP=iXv$VPfzPZ|++6%$khN>*FE>E1)R;BFo<@;{4PkyFMKXr;9&oUP@BhOY8lK zi66ZAW~ig{`+N79*p8#4ttzX)cbtX&UqU*64H|6C=P2vH26-wAvQ;3_D*P3Fk+DuP zz{F}_K9?3=rSCQ5G7f030?NqB=3$dUk}zn}N{mjM?d!R4@IdYH6CVfc6c!{Yk_Vp@~1X;IHjZTrJ*P z;k`xPt+AXCP7BW-{$d1ey|m8PQL)$lI2&`f+7k;hZ5K4T#5%&PQNjA2flJq0 zPF}ch;85%7v$Wvd#S7ggje^3uu@vwn=OZymcsp}soTSmjLw+$5{Cauu+dIogK6)=N zjjKf_PReLtWeQ@xk(OGNn!?f6DD)~KS;dd)o8+Gj2P8CP9d^*mPi;Ar2eLbB+IM4sbR8nz)Fr zC>A9qSMt@v@_2ICu518$MTlj4756IM^T>~?xpEy$Jze8HU9;Uy3w^E8s|T8ozS(?m zN5iMZwfhP=Hhr?2X#V77qG|tLqW$12U58)qJo?-2@;7_V{AsA>lfld7y%&yjoju%f z<_imMD;9rAGPxbLd+HdTY(#72mn{`Xnk$bqRUB@rJlI(AdBgF2_2nNolz&`*?4$al zpVS`Ncm2eH_A_4&UpqteUq07UbEdQAJj$z)j*jVp0Xkv^ajR?Sja=^=X&M-98Ddg^ z;ID|;g#6O&OI?Y+BBlZEh4cc<=-hL=Nw zF2n4WbeZyL2n)!{1*XGXTozYXek+?AOiMZ%ZLrRI(gQG%>X_++SCIq`vZ4wDr1a_+9@m(g+zap}~7& zaaQy<4Js;V0=y&e;}WAw?|s@!8xgj?B)LSTHHp@_KK4=3#@1MK`&3)^?LbrvIxnJTZ%BG!b3uyljh%ZBefYuo?~m^Ncz@+@5AJ+->&BnwXCF+C zGo6Y~G?eO64RJd|*$uKw0Sa9Q`pC?Ukr`?VgsaYL_TKP!Av!)EasA--9ns&--l?ai z`Ru1BYEN7mtm_H21&DBGAl4fhqu?7Hn+%Rjg<`gH;wDG@tlK>zlYD;`+C3ztSW>}y zjk%+jb_OvGJq=;+jNQ6BzG6Nbw?~!mt!$IBlAtk7G~mN4dfhezr}VE!;Z zk%1?-d^Ja4J${>vH2Me5*v}YiU~YbJ8dEGO1$GSMOc-it?YmUhcqzf# z&dCme=3Ja4b{zTDqOcH2%RHNs)AQ}5f$oRt@x)UQ9Dg%lhY5t^Q~y6*@BQ7@d3<~G>o~UKsLLv>k}Oe_WXW<# zoWyb5?YKp@dSO>DB*orIu!95$fG7mOE>a>zRm(NDldOC1_pW=_dw+WWrS~&4&w;3X z@4MElvkpP>Jk?flvaRwcLAKjg0rUK2Tj`;W(!)e+@nNWGTsy*z*Kl$hth(5F@j|4n6<4Kc z|1jnrjJl3A4Mbap5}ko$Hy(*LqgDr1XwJ*_jE#_S^&BmQ}V7v%kZ4#RQN(!94+0a?aC~QP?wjRjMew}e$ zn>XxUl~uGl>&th48}@zn&#(Xf&8_>evdlKMEEoVXNae&r;8pVrgFh2TWsbI{tvvEH zh+@M{O<*O5HE`i*9I_^iGVDMEONz5-HK7naKv73Wb+aT%r&P+-_#3#gTs#uR81Qa9 z(pY-BvHZ+fN3-n)jXi*_hqR~FLWbb1#NX%*EcdM+#0`3g&)!YU-5bBb(c3D>A3#3pKCd3I!AAvElHM5|Z7iN;+nEE%yRszMRdUOH>D zV>l-ag~4cr;KvFp54B~Lb9A3R-+cSsk==;wYUDLxp%W3cjeg){Z2arVnIGqGojrVX zUv@4x09D=JZ{MPk{rl}Qt&gzUs}n72-PYJ+>!@{>lG_VS@GM8>D5>W8^&;#tV6ept zOBAh&?5}Tofe`D+9WT6^x9PR~&9CR@ieJZ;+?O_P*|GY$yyt$Eza^`1Th^=lUViKM zfBNj*FRoQqk93Vh`Xh0F1T(;|Z{2x#bNQQ_%Xg=#kjm^@WW9`{KwYpP)1{MyZHZkjlr0U+hL9?V8Dw^Vy)rhXLiho@mpI`zhN8h#;a+0;ZhR`)HPCaZrm^H|=lS|z zvp?1q9_xw3`WUG|$H#a&Fg_JZ&y1yoc4l}GLkvwdt&;@J)bZBXB5^aeq#-SF|s8 z*WMhOkGAd2EwC)uj>at?*oK0XVS=j(n`A^fEaq4G{FkzK?9X|zaO>uc&pe&IA?wKJ z`^E+aZl%*xA?y|dG<7om+cYz#SO!>0a-}&J9r6fWx@ikbfmgE7+VC4U07WQehzuyp z*uh}fu4WlAFB)Nt3P-Tj>92z0cz0k8jZlP}jj+eHr%2 zrHvO9aEdH(*l=~>6{=?5HIWjz(Y*-_D(S%_kV`3ahIF)q%rYiDr*y@g!^QwK*B{ir zCsoc^{>vXAfdwTy0nL1|ClZIEAsN@39_VF0?{r_wgs-vZ^qWnG4t5`VrQ`5{_Ag)Q zKKeV~2{8AMzEgkVTfM~^Xa3Sx{PsZE2ZQAw;^7Sbjx|&UYe;p^pl9y6SgavLG{WZ2 zb4NSRp6sYR(NR&1CwY5iG0|3WsqXj50Ty^onAI`4RR8s8P2ESbr9$#xxTmi#rx!drMptnB(@^w$+6jvNmO9?OdHzSN_3VsP6lP8{f>$;H<@vQxXj=Yib2e zxjR-r!9MLjwF;Us&xX_<@=9fm?;40$a%cFWwNW=P7w|P*mg$6DRaDnl#}>xYB>Q%5 z=xholk^ZWbOCdP=AeDZQOf;4j*PcE(+SI@ug>FhrKJniOOvo@AodlvHHdX`;L3G!?T2S~(KEsS8%t4Fh+}{^w|FJNaRNDDH=IuC`v+dRF z!q;+kzn1^X>jl_37jAxb>$6$UuKm^8eOq7r==HyR@z!T&k5trF)I@tDe_#GP@yqh} z-!DA;X6EkQB)9_$A-tvBNGeMH7AwShU!*~_j<7I9+ z?m**V+%@&p#lSXYrLtgMy+ohyv4=DZ}lgeN~1mnzq1v|?_8<-8nWl?^3tCzk##$YJ@GfwHNc z?>(BB&nK3JA1JY>aV`med%07$*E*#=eKF*jv%PeCG zH>}~j2ySALPUbcK*{NutAKhRGe>BkL$|+f z$Cl?eugcoG>Z$jB|I%nz)z>#7|F}JUHyN~?jyp%v2iRsWTy{o6=T&}jVy$rrH_*1 zZRKUv$B&1ans`Y?0Fua|HPXPr!f3|IQ$!#jK+|QpZ%W?e)Lr} zh%_ZP7qrE}fHn}T|4IWU4jWiLDiJNFl#FKBE>=a-WVA!A9k=howc}v^UIJGl;*|o- zc=z%n;<{Y42VL}DjGuFhwrnZP$=S1OOVPgk1Fvs?>i=^indD0=aj&-(_AHta26Cc zMUnV?f!+WK&pKmTlE!e##@C9S~d1cjeKp&rwR%ItR+C*iQTqW)hIt?!iK% z5sh^RV_khCjcvHlw3Ju2l$1~U`W~fbMgOlgrj;yoW=&WL%7RUJXvb22zq^FV|0jR> 
z7`r_*Oqis~R8@vb!q8L}nP3n*pd>I9YNo?u3?jTeG-?t2oadTuVqFz1PO`g3O9Ku% z2B`uYYqe$)m5P}BHn|MpW2hbf3}?km#X7YQ#&N1p7Y2D_^RGkgl&)F+)uBf?U}7M! zMsi~<+X?TyoV;Z!++-Wz19ll)=(7vSFo%dKXggatGR!uzRY4MdX7ejcK4F7q&)Dm` zNWNdoX?PV_*cv8okZ^_q2N{?~7+=FABv}b02rfpR)5KQCV13356^uF3&GuQnnFZ7hjs#ivGvk+pL3M(&jS#HMCyv!B(S>UhnUz(m)EO_kk{Pif}$bgZG zxI&tKJH_vHvYH{h?Snghbd zoHOKC*=C+gB-&0pZFd;BknHFfGTYRqm2(iR#~BSI9LP>v)5q1$aY8w2Wuz$$-{`y6 zf2QN~=SMDX+gZPBZ__^D^|RJ}A2#mVy8BPR+4ItG_rLb+!8e{iu;q6L-~IjWxBsy7 zt>5o{^W_6uUO&Ed^QpgVIsN|oM>cQ1xNY|&0}AVViO6A1x-3WNP|0nufe0E7i0~;X z-D;m-=`h`OdtqR>tu87uTC2{0=_$w4vEf{K?G#RC+ud|X=w*P z-S)rle0!tMe67LUsJ0pv_!|C(ny;skHS@Q903ph)zN!AR&6qs59zNf9j`4z27JR%> z@Y5!d9%s0C=2@Iq+Eo7IJcPeeIGgfWl{-0K34e9J)xB5p8y@%DscD`zI}c-~s#`s17siz#aQN=c)UDz=FWh3@OA?1&aYUDWMvm z`#A?@O}ykKR7$HBLsJ&o*Nmh=L{=dp{{@$gZd*7G8gxblwWW(=EnB}8esMAE1dVh6 zHKV}c5$U@edG#*1W`}!wEnUC1tOpc!nCF>=hViwezw*fu;WE@2?b8M+=?o0uky9}PAE@v8Ag_a!1ZP z(UgVOmRB*j_cfdHAz|P9n?k)fC=F8e)w59=&5o<1e-6R1_w(;Je_(9i`l59u`K1-b zm1RXm#rgTA`S~Sz0IsQ^&4My7SB_GUUz_WlS&_dC{C)J)&eK0!{oZV^pt0`EY|;o! z9xROWriSv*Ve2fICwkDyYebj@(tq9m=WEaZ841>=(hZtKEaWZE%_1uDG6`zCguFqj zb2e0Eudm9jUYB3P@s@ec%e4x%t4EMPyEE-tD!;keOLMZ8n`z=-D5-)8=L-WUJhAqlOSdUEq7lp^s~ zVY+h8Uhs9RjUH4QIbRc5&yE4012O{Zx+V=I0+A!_RZ)15d@q4n zakSoK4tns_aS-obX==GD1-9hXWUynSI%O_)PEFhQ5mp z{0W71j`vXW_DR-BtFJXAMt|n@r|?k=47G9cYxtXFk4?N9G|#5R5_8f085E|-n`p6s z%Yc#$*Rzl*cPP*xGMUSj}Mk&&jgL&9HC}>dGAR z&H{_evPw#F3oY`pigGey&(Uh;H!(t@D0?+&=e+dg05OkH!e%0QWp>4i2bcfp@t3}t z_;$1j<<*1~+PXsZ=_KN-)E$0>ZW*R2g;gs5%|M=jm)x@FKst>=u?#LT8?Cg1|X4i|=`(Al^|Etd&cy-gk z*B(Fe#;*>){>+Y-o;~^g>!TMhOw`qm)VGW_w30w}*p(>S$gJ=;sHd8lzhL-lejQ%< zRqa@A8}hl*@rbOszt~}gh*)g7=#fTXHzc|t+J?m`ELd4h@l927 zZGaUPTQfRi`D0L0PPeIAct%DYx8HQh5 zPTLte21n4wuKM#IZGN?+IBn&Mw0wpbW3fvhUO!P{9i-#IN-#8W8J)sTA}oG?>ppBt#Eu6S-d z(lDBR#|7^RauiO$!glnC`*I{2W)Lt0&&m~aT6gJt*I$KDSo`q|fGkTm1Kgj4r=S7o8LrK|IQnqp zm}L-yhxu>FkwaXJ{|?|l>V>~uy~E0L^Yedt9M7wqLXy4Jg?acemlx!a=`GGnFLLlT z6w9^&DD)lB7|3X()sHUDcx>%JOW%#rt90};B5<;6SRob}S+Jq7a=8;-;|lYzGMmb| zFyqSh;csT{kO6qpi7Q$i!Uum12X$BPbniN%917B4@wn4?dP$k}n3-9qjBFsTIXC%~Xw8OvNFppiK@9BJ)nkMYHf|IeO~!w%rYTw-fMR z0{kTozfwvZdcFL}o0Ui2tUmO5KYu-l{mbx$@u}&+K?{{mHFw z_a53iUSB`a&@fzAPYinBu6l?Jg3UeBCNB)NblQmCMsvpke-TdGPPEMGYwRS1vp(X% zOrFgvqq&KYZ${X*HV-s6sv{VxOQ63(Wi7M~h}_hOE44knsYSe4JeQO&Ap`{Vh~d^Y zf+k;gxUxqQ{DlSM5KbhxAZP%vEC~$Yri6f1dFJUBcx7;;MD5`CD=*4WO$K(w!Re$U zkVY$^WH}|Ns^(~~AlU6StlVRL2UK_o^(DV!nQ1Lnw+bw+|2; zrn)mkev~u!jNF*H^7ZVsWBYf$@S7(o2N!29C{9nS%zdb;aA|qo;<9Xu%Iu}N%N|&q zmiFF@&;9G`+aQHB*jDhRf0+?Z*8EEgFd?1Q+!nzVG_e0t^Z7l8n~$HwXQS^@9oFJ} znf+X5II3!oq+>%-_||c(#{~SE2*Sp778o1t2}UEh(n_=Wz~!C zj11i!9i|i77gr;J`zIe4m*j8XGRAWBVrvy%ujC4Uh25db>|{Bd(^)`Rx`Gmh##d3UdrB{7-<{_-$naBQAk~kRR%i+ypzN!3x0!j0-X`0K z$a3~D{tLt9xAqf8=bmKds=g}X)ZV-GE^Dc*Q<>h|owG@i^tUiHEU6!t8%?)IPq zuw^v4YjnJOm>BKvA0HeX?(H4y?jG*$BStz#?q9jz+tBy>Cti5G?D5LnN@=ajb5|x& zRgh0@w>+z$EHhtQyO_+}lFV!Zl56zV=PVRuEe3zpF+sZ`5g0K;39}*5Ib_UA(;0r0 zw&(pF-%Q>?lASnDrsm8AxC!Pu^15|~S8~NP$s{ODV2lqQ(|D0N^@dr%;-tX~EA;SM z8xaKp8pl+(P0o{GZnEQNwr*}c;wA(53|N5pp178R%dN3=Rb9bSL-{)E1$MuVs3o&I zSP0Kc!zoCL*6llX;`3dM{UsXre?c@I_>wsMdI@p#t*T?2tB<|0;qa^LcK@+t?~7%J zUn?g7-@|VfA9}Om@LQD!U;j0vc-KpRKJfaRtp^TI)i;ecG>+6WcA=3_VHZ5Lj$p6t z1`g|w2zFJ^6(TEZtu4>R;!-}7$~kL0j|HbpaFFaOQC=apPLx-+1{wfg#E0fr>3vBY4-gA~6aK3_v!$9npUeUAwDXX6 zq-6*zIheA|)KmYFZ>N5azpd8HYjOokDO(kolmFx}d(hC?)LGyl03Gqybzh$S_NH2U zIU0?npHz;T=Mq!hZC{TKes_JM_59JzZ~jMN{^|t_($do(ttw?2_==LOW&Dd(X01VX z^?XI;jo!ZhzHxW9y;nJQyF5!88}p4QY{(mP(aby=y2Y69hSrPw4>lY*)^)zN_fi9Y z8)P$7c{e*v&c^zo)8;I&m<-)U(B7DNmgY*^p8@V9HZwE%1N4560%)mPQJ?JZTQx2~? 
z2Sv1Z6GzT#e+(f;q77(o{n=9E3UVCmykPv4xmGu}Nu%qqQ0>1FFG zjbT}6n4{b(g7~Uq>c-GbXd^0F^5l2MuHPNMLHpso@hi7SFaPJQ@9GYBKVJB|r^;R^ zUArlFRYBhB!s3h^5LQ#q=ooTK>Z;w1ofN9-3V9R! zz3jAC!YUYRtOA>MTEQC0*lrJc)Jf$l#H>wdZ81tfu8m?l{>DnQHAZn&2CS;>GFu^f zTXCHvw3P$bH>He())i!=$D4+0cOO6f#s0>9+nNvTXg>H^dXiloCRci8`_wl z*V|x`=(6C^-lA&_!i3t285}&;(5(pm8!oiweh%`6PTr;=(j74dt$`?%fU#HV8EI>W z|EBgC1$NvLETAE(V5>1-kC+)|BB4=Wp{hZ!hg4Yej`ZGJQrTWpo;%)u#6ZJp#5Pzn z6lAR|>s-}}#i^Cwgcl2Q1$Jx|u?jrb!4!@Mj+LN}TT;+QS6ivoj;R=-wP-3&pX%ti zJv#cYJJ%Xd9ISq#{E=l3=H;&~!Vx}qbwS>mS)BznJ+M%aA6#-RH!u;9D@gq4jYGM8-klZkHB%b<;uxF=}fWYr^$PQlU0)E zbV&8=v@^AUu$(8(Lc${&kWyCG6SIMWg-6IR1GO;#+*>0f1RuKvqZ1Hxd5uh)N|c`LyS_#a8BW7B)LiD*LJut`&36JEr!FW zDR#)6YRgW7zjne<9Z2Ql>P1qqI3&R1<#ECGxNu)M8x&cCmxZwUlSsC_S!tdV{yW=0 z%YEM4MQ5xj*dwMZacnT%&Xv9q8dCmt?LIf9O@$kX(#%4eIhdb^gh_fQBbzq=Hyb-g z`6sg_g`1yM@W=xTfBX0gw=RFfxPj4j`gU#3V9Yryi)&lm;Q?Gv;a-{PzV`jh|2p{b z*>zdZzWBtuZ@lo?hi~jV`01H*dzTo`2Tl!9x76a#rT$ ztRnJr)+C`!_1Yy5tXP6gv>x3wJD-g@Apmi-?x{D=`q$KNO; zPP|oW@n-q4*GrGSR!$syt%^AE#(F}!tix|TjZ6y)3=uxO<>g~rUu`;YV7j?^tbv|f z6@oPan7XhrV#W~;H8d#Xq(Rj$CEBp21c)KC@|RWy8|)_NGqUhjUre`@a0Y_GUm_%* zU2_#wSPk9Y@auk(4!+E~B2@p4_-i^VRe=K+PH}bSS4oJ&3WdYgtG!`5p(Y~vb5$u8 z7IUO$s^QB*%)nu<4yN(g;BZOaUMTEg$u+jq>8@hevmsD^SiIUvZ?gZ zoaJc+tJ88)_CZwzeae8uy=V zKUUXwwsqiAr_Z@CGZUrX;k+J$B%krQBBIX~lVz-?7UdT9Blkc#W{B+qtTsm0Yr!J9 z%1H`awI5lJ8zrf$H_qyH{53C#RQ#0?H#Wr>hzLO<3a8+&PkxvF*^-yWikKLK3 zz%KdRl5}pk$U$q+*7ydoZ@qfp*Ur`x`~_`8jLcjcnZZ^IxYP+b-f4bV>hN~BW`F6> z3(P*(qesY!ha7cmI8tTWpuGVw2N=;z^jzxRfTEcl?7HcUfpKZvX*)_~a1r%$M zK6%Hz+GYnvuMCd;c>R9oh31<4bsLK|V7tNOqpJKu%Dc)rBf&~kSctDYk-NM&YgtiF z&Y}eiUiri8Uthj6-9Jt0SeTWHTmr9z|H6WqMR;}a=HG7qYwOD&maJX3src8M%6_}C z^fw#xo_@UGSI?C_^F&U~wzppzsz23!p`q^h*;5D3e)X3f?{E2J>pLI6^77li`PCoR zZ}?ql+0!M(Pv+*-tX`V6e9_vB$5s<-m!##bSx}t0xHNl7Nfv46B_%l?*}7VDp=4q> zVP#?~0b8be`J!LstbJ^>v;Fd5KTRmM*!Ip}FOu3?!gjpVP}$nJkA=l3>1A)H9h~hQ zv{bOev6s~Sx2oq#fgDV$%*jfXUv1^Kf%uA0Z8uV2!`!f+G6h(-8sV(=xgLHDk1H1) z63ccCsTpRWzxc1-1^5dzgV(%A?uS=8#%g!(Kl#~LO$R=2KKMo3p{*?k-fcbb7IEzD zGUCL$B^K`#AKP4b?5%Q-ilc9qAK6@y#L>5_Nkkubt@_X#HAgo8=J=N9PJHlY{q~P~ zPd1EQ>Xgs3X7z>{g+8~pzo~)H<6^`ZXfR`a=wj#ar7k?Ru+f0tLWg6RH8iZ*(Ah&h zzv9u!*3{1K>MRzFH;9c=Y8)Vhylj>1Wh?y^oYa%dR`8ABIKZ#QY45dQ$9}X&+|?@K zuzs*>q*L8C0vEQ&kvGo;X6CdEw~dfz4qjL?%aZekS8Roq0Yh7?J{ur7%wjN8j_})9 zv9$`XJYZwC7Tk=IY0RHTg|kt41w{?|)R*ly6{PX4yq_0^R*qJ-%Jen&y^LA8#PR%@z*0;AG zKi_=hWY_5nh_CuDYOrsQYQD-jOTD0O_tgB4lr5^MUF*3_fWMw1_Ig}UUYTzO+?J4X zRXstf&!)O(fRBW-i{ZZQd~F^^hwP==QLRTt_i7=Le2i0dM}FJ)B>snv1gm3Udtdjo>Qnjhtxlzt}#O{ zbT;y9G}GQf#4O{90dpP2k}F9)r(>BjN@txcHuSOBszb9HUGF5XPK~S3=dVv*uf>b< zqh2n)JL{4T7vP^m^6B|$?NYBhj}~JMNE%GBj4$iG=y9>{kXQFCx6T~s_JB=JyfJXy zgB@FFDXf{-5mxwXiRd`?D5c>j^c=}6Dexu}0wK*+>!@)zfP)tGf86@E@x=Lx+(O1K zY%D8YSCn5-NRMrLN#5$x{59oyidAK~E6Z|LSUDB&N?(D6@?#4g{OFyJu1{Vc?_zmO zb;eEPV$E0Jn@D{Ee)UW+so}@3zklubuast&*A#EaUzy`v;$X zd-c}kp|RQF@$2I=S4JjhN2k7?x%TzUjjQ9cGb5J=JBL~?wbq@kJ9Fgx(Y+@={%qU3 z?}N^NE-EN2$?$=sIZMlOmX_u$w#Z&$SS%FQ`~}*9-P^Wc?W&~@EU3uIyEAzeE(s43 zF!4sTqb>ao&_ft5wcX@#-i`owwtL9JtG&RnA+mc+xxIkhq9b>P*SL$7Z*^ww{QU9Y~d|E)hY?>#Yb zsU35w;o7#5rWRwquE-MF27Q!r(Hv0ME)Odpc<`b@EqP`u88cJKCo*`-+8m?--+|gd zgbj8XUufbk_p9_D6Xxqetbo6=t70o8Wt$>_whXhJI(RTSAhr?B36+6V*FxJ|-Bc!< z5aeV>hY5UPloVS5Ukg`QiAM*GmUCO7WXvr)&U%$L;kllIwsNgFZ^-mY=_Jk@Q5Xo* zF;>{M<6YS7Y3K0VklD5_Bpv~F0}4Cbb7sF7xHi5ztWG=0LQl50gT>eSd%wRtJ>A*< z;h$gKkeh$<^8^n&Rt4%jiq{-0yb@jv3QsikVsh1V=v3>8^F8P3Hli3|4x*+Q zhpL;}*{a66Z>V~b+7S@cEZwtLyJv;J+y)JQ-78XAXtw+qYj(jd5+D|;>YhJGoviWG zMz@FK{ka^OaK_ZTckF_hWqe+IJmht`&nr8sZT#&V5Q7mG-bxdH>$vQ^*h{pAypF$S ztdSa#>Keidf+G;iZBpJE0e^!4%qeYx!vC&!$*QsR&r<3I=|FQKtkw^JAQ!1MMk?}x 
z(a9&HodhM>5iaXpu-ah(kWhZ>?(wn6S4T#OYr|tiSo3&oT?nWQa6sF@SPgvD2kkH6 z5Fh9^AFuTe3nEo1U`w(hvS!PN2W*u(+B3}YkwAeVx%|Nx)m8pne4nk$trPqW$m=Ya zHM|KZK7OahZl8#9=<<$ z^XU75ubhv2#X&Ej{JFR=kMP#;836L?_UyZ~Yfsa@uPpX|)VS}%=6!!@+WW!D zEgLP~-E```$4_j1^5~XLiX-oCAdYQiTLTIc@ZTewt3ctSn`;hlew;Y)#;w&o+pUoZQb$eGZQ zLS$*7k?xqehqVct$%aKr<#(v5j}Y@sAQH?~g*PzV;MI()l^I)s)cjUSW_Ch%uz0T2 z&TU=DwD{w+z($4$ARFw&ZQ4S!N1Qwe9IGQ~+Od@_KWbmuV{uP90ti)v0|oZM3SgzA zRsv1m3So`sN<)=wT|qs}F)@UOc4VCFFy=dR`D&psTYFJ|MOdwftynGpDHM4rN?z;l zZ#i*-Cg>|&{WeC>mjOm{+Xe_d3+0uGuL3VN7c2?^J?ENRk6&s&dVw8dzIELsDA2<3 z$%Hk4*NEdfHcBJnT9khW0;PlzVRsqQ)|z?yrcq(ZIwRRJAdX(p$|dTaA5hp4LHpq* z>}QyVBgzklei`d0wYC?_SUR=}Jv}e#RLddN^1yT1W^l@=rL#`qMZjW5WIao$2hb2g zwCJZ{2rKP$#9W${=K7WVFna{P9O2tAnzx|fL3=#B3zeZwLlwGc2W_sw{66A2Q>o~` zVbcbM#dgS$7iFf@e`OKMJ)*;Xt8MnjiwgvHrZCeG{n zV!&`#2PCT(zBu;zZGyb)B*>fKaztk&&DM8jUx6~=epS4SBsb0_aZY_Z;XH%aoCSMt z@1UJQuOW#F_56Y~{&8W;tN8dSh70j(dFIe8jw!$-{dza{;yfG zn&wC3H{x&7_B&1~=ljdIuMUmA_{?wq@br`G3ebAxVOLd?U$ri;l2Jr!7p?BC?Y=*K zH`Lc$DayWXYj(qI8^Y_J`8CK;p(k#n&kUEV-51-Lys!?xto-uQyg~;1(gKXmN}6XO zFD#fwMH+{#WhuQNX9>x(1$#f*d7Bkk3scL@Kg0od!b{cW*5!z%h9jF87{fljh+kGo z*Lp_WQ##bHTIu}V_ff~5Puh2Xd}_;+ z7VkYpoY?x*(JhZ3edozzTb?@luEnu;pWu<>?`$~s_WI**uRpfA2JrRRfG+FMYftTa zdDEU(pFgqnmG(o&htJiHz-KSo7{UHajkK9qxE6~VZW0lHk!&^gH`4ao1ge@BmLHSq z?QC|_wTQJ4V$tW@0tBnG3Gk&&IKf{_L~G_k)6htx3$8#_*GG*tcM;l;_$%bKEotX5 zn6mdMrS>cFh?8RP-W3YFBL|6Sixm6?d{e@})Jniny9M(#`BTdfG1ZL1Y9ts^xf(xC zDl1RT-O`gZM!)|%v)o8n|8R0V(S?>Q!u9ee{1b(Sj39n(LoZ@nkiv)SQQTKX=x z_RqErU2YB2lmxGVM|+QO;_Yk+b5Dr}2oBFO*tMqPj{eIXy)*4S0l{I^k&$2Lzt-(5{u|t@ z5Ipx8C@c7_?z(0p**`^%nDC~~K#;>d+022b@0badnN$z~zg+nv^!!Qo(m*7oE_q|r z1VY|q8XB5X@i(II94`*|`!gg0;(!BU@cOTj;Pg zCi`Tia9GGcyUDZT@1230cL%@rVTIR&p;j~ivGyE4Uihms=v7o@=ABBU&CZRnk?W(w z+TL_u3oQxw1Ux3uxvA$XJmMMPfc}Hm(Rwj6@%8{REho7`ZA%~%`oxUS#t5ZeUr>3S z+PSd4BcW$>%`$IiRq%zyQnT}o+Y34V#;SzRqLN34BRj9fUzdxoO!=(z1wZm&l7H3; zkiSoM$a#8Lhl?T=g2_NSCq-#L^Z)$tpWC;5gv}$)jYSzH#hImr8Tq7zNzz)MPTnfG zWJTsmoLwu^Gpm8%i~{gCZ}F<4W$6%*yJNGuYmr#96}7U^8m!~F=vWsvEi`>e-(9&g z(9-nm#`Vu`T8EzoJQS~H>dwT9M_1>rDVU&+L%O1Mh^d0y$GBgOwc(CMSiOv(TBjaK z-(i2~+spTBkJoO5M7wh}ONgTELMqgS*>oiN%wO;Sr36eDFzRlU&&AASSiI(uwB={_ zHheRAmwR$5{-(D5PSWt}Lk4Zd;yGSrZsDH5vG}7f4%X}tEITZlYGmL$AAgl?j&ed3O{YHJc4^l>Y$vVrcmIb-u-f;2N}PCqBXR13CyA5qKMmj#$KQMM_ ze6HAO0%YlM#`9S|&t~(;a*iK`zv!k6g1yc=*35jsF~WBKNBo5pV_IbZtq25J(~eZz zbrc3u6rMLu1k7Ov&MRWejv~QRfiJ|_TXd1a;Tt zj&5S6y@T+bF27(hOFdG24PI(ws!`j?i@g_`25N1BGrDw5mEt_po!!E8RMR+8O4}TG zuDcn;q>#C7m}v$PnnC>T+HXn7P|5YmFSmMcESiuIi>);o*<^%C0tkj9!;USaGV&~_ zetUah;tmc`Zpe(YF?JcDEE_VSseH|lLZ~Dp4gEOc#VPX_=3}36S7&pJ2I($C8?%?zXSw@i6njj+ysJ?9jzr`$M9t-sKv77zQv?qg+Z|J z*R#{YU%Gv{!muv{ht!jfcAm?s!xkQ{^yU-5!X@B!z)k2a5cBPJ2IUhpgE%tr zaJ)UmkvC-imBW|v*;UXnn_*ZCspJ9@q&6T<(Nm$Fo2%!}fp~)ELU5iwr98Q|x>})d z0P&G)W{{QA?Hltj=Dd;Af)o4&j%`?Bk4@eYS0b}|d*sId{n!8f-plVW+h~1RWm!R9 zX?`xA8|ah@@QljOD9O*je*@PGfU+Vzvoa&ADl@+_BQN8jMe8#2uJ(?w0At;2!p?U3 z5aExeSppZpCT{+2c$KsxjZStC{NwgF9ku74d%WiH%A(2wEVfKvU6ox$Ec?Z(n!HUv zT>0_(;0+U|qE7Mr8=7RmH=^(;T|%1qD>#N7|NZ(mr*`dpvZVO&(gI`ad6_U=ImV0m zV#h&95v{qv6rEQuWRhIRV_RdZ=u*AlA_BFe5@@8s23 z);YkmIH|UAdgC3o`D5#c|wf%w88GCGw-cGwPoGucP&o7S95$zwI!n8tv(iBUrkbQP_{t~hP-b6oVZ;X1h}>e*mVdVvHY`kxJXSCI!3Ed>8bRW>FMiXFj_)l-=^ZPyFdqT4XeyLMYTn&h0jE=u?Wp0;>D5h zDv@ix7V1k1eA)RgSZr2i9+Y&AO}BBvs7ZFzCM^uBRc2U7%KtB+{P1Nn=rD1$N1(k1 z{i-Ynb_{2?flUOImH=2M+B%R`JJmJWmChh!*Q;O!Lzb-XVl&{|e(F-s`9>Ph0AErQ zeDWmlG5eb8?4IuGneOVF>F!q_y(O6uKX_|i$Ow6@4Yw!%kFT{iGJGDzK-o zXHtEn?+#ve2)=sHAsGDCb%Kmsj4%;=jNo`J{5ImR6jmTtz+6Xaa8N&L>#%6(rp$b( zmD^k94bEk}6MW}!@za36^N@GkQ{1E1hAolnVV;mluMl5>vc%d3&O0xY92jvTaSyCn 
zIx$z}MPxTz<~$tf$>*zAP+<+&j}F1s`RfvB8$oD0gEQD-#Qr18XDSPA(7>eN&RK@b zlWV1ui~8@IzkjK^qyT&8vVt7U8|8V$7Vs^}PcP0}TLw|ZXgxEhEF-%zJ)c;T_Ruq> z8|Yo)FJH*3dXxBXs6smarg~nDc1-+m<8R0IovKE8m04PpQ&Eyt2I($JuP)A5hm`WE zir@U>)<0;T($sRY%?`n%4#CEPX>! zSfcnW1MHMVegku4@LxanR*e5M85?C^D*+QG)0$At_cB9{JZ8FSxiW^UJ4xu&9+*sV z$NK-+dJnHS(j@D`u?Lk%Scp4+4acYx$Pc@*afnAcT+?$RQBk6;$E92O$Z$ z?pB+fo!wdgo$nv}ei89xQuMs%#L1IYWMx_MJ~!gV4ZppJ9YJJCUr1l@8e)1F%AH~a zZJuVXK^Ri3{MWu&FGDViA5t8=Y?lvLJ+{k5tLE8i+%Rj-_GV;0^rVgAZ*T3x{)hA* z-598=zR3wD<};3-aV6g?_Bod{9Si1Tz+1DZM>;>rhNc718OHahbFs*!^2BN zi}=}vqOc*XhrB>A^9OBelFUfLZ9^h3DPj9=)UT@on;cp$_%_Mc&GVHxM>^_g(8FaX zat4mA;A{7XZw;v(=B_IW%bX+k%M8xZRlUMcgOIaX1`3r03-Xe0v6Kt{KDKXlUe6I- ze9>7G^0PGfHg;^SyO!y-+_J0EI*$&;ZbNXl75sGwIB2ja4BGI~GT1=59w7I_KqYi) zp>bhj%Ep={$tn1yBwHF*GCguD) zfJU8k^m1^_&>Xy$W?}c=G%u+hmvgpSjA;=Cc^PcOftPKUU*vbj1RLTQ+F0UdibzV=^?DW!M$ZKSo?TWEuW6eLz zED#@O=LqX5{&Aipw)1}uS9w{k+iK0>(CKv^>3g(ys_P9OE9FI+pR+f

RcXq|jSo z@oc>swAF?6etUQJVTPYA{C>+S7z(r14_tBFVWlx&v1Rsy0xqAV{Uq4hW9_D`i`b%| zV+(p(7DXXj+LyHWD-FeVHv4{gtkgJE3$3vG3Y?XgS$Z?O{CawkuwSD;ikd{F=bN;s zNlQtYay{qr3pvHf@v4S16abEv&! zW*pVlnjMG!NfcbB`81Bqog8DTX3IkRj9Xu)RH!XOZfw`{29fELb`J*t&>*rb3`S-{ zPg>e;VtZBFUw1oHdn;I1)>m5^d{{)Z6lO~lZv$SNincv3%x)~&0scO}N>O+x6u2RO z8?eh?TT9SLr`KOemAINIqO5{#kMgq~%Q+kD&i981 ztPaUtTlX$JS76t5{>p$PCCh&Eul64Wr5DMoyx?G|4}l}AF52)^WLKakjkF<25lajR z8ltW8)xx< zdo=S5leLhPGC({GCmq*N6D`AK#b|`ks)ZI^G!$4`iTlUbkewbV$TH73G>f_(Df>`; z`|z_4z;~jpd$Ows4g6g10I3Pm{upN3ih@+R{1fSamisSL{Cz#XN?6YkLk2aIJoj#r z*@7A*qH*LHU$lQN1<AwhX|H= z79R;iSVPQT;P2W5J*`L@!xaG1iG+r{Yyl>PbshcS&{k`*ZYH41!L9+5VwUNOIh&qP zm0cc|`2hNHUZ(vT>XAjau)?u!Nv$t=HDb3OP*Uu)UjBJ3=WOo|z#_bRq_4-%A$xDo zWs*IbtgKdF+sAoc=aTDU*8V12U4*Ys>}}dgWBn&%nlXb>_4m+>w3iu{4l;r-*LDnT z!E{MzsC$*;w)&M>71-*+K&qB;#cstO>oU(Crv`!@j#Jvu0tXOm6<*WKgTEGvze3H- zpU^pzk7@gi`4YQX0G1q{|G4zwr+42Uq4hQ)DS1m`!sht+n9T`M6n|rJh+=FmgpppP zl=zLbBawwx`{m@+=;U92^4sIPPyYAE|AKarbK|06I$^Kc3+yf#^EF(Sq^Mjg$nAWX z3M{tV?=HGfefeZrNkQqUW2X-u*qxrnh_g=%FP;0i@*Xq*#si>Gf@{TLsFM*PY7UGzxF!GZxF9XRtcZ~swtNA zQt{$Bo-JPrJ3JiT5vXf4{ySKEmw^1r@P)b(0>CAji*x;WUXt5b3=7^#0Kw1lw?Dm* z-g0$&dr7XOqooj>E!0@b#E7N0J;~30oR{_RQudSl?e+QlpItdnmv`i8;fdZy_h-A? zNIy)&0z@XbQ>0meAk!JldD3)NB9K=1m;9tygWo} z0C`{?c%9JG$u=9?^%$<>xpH!y;p;t@qN?&=g<8j> zy~nQAj_t2Xfv4uXhDmn}J?j~L-Z$3XM~R|;$UwR2F+p86>~LX?6~a&jCh`gN{uy!ON#`-Yy}Cc7t4S-ceyq;M1#qD+DY^} zZ1~GoD39iijX}eODYIIQ)ythi_n4L{F-O7J)JwJ^VZOZyBaIw0^fP@=HuMkA7|eqsWLqk@G1pg)o$_B zP+(pU?AxBv0j@SZDG)oxZJA-iN{4HTo`%A9k9*BdrnQzywsYG161bOUWO=pr?i^p{ z0~AX!S=IQuFU{49c-uZn`yAHuS?xJeY;_20-m}D}P z)@wSOf=~;y{sl)Cn(VXzQk$@_M;SYIC_J?+YR88PQ3p5nHgOD)^@8 zKfnDS@~aC$lDBi_Tw(YXj>3k^zMzpE5P=H#TPhuk?>@d-eKj+;GB-ucjSRn-nxvI@ z1_kM$=^&r;PY*Z|P1`t=H0!OFr}BJ>|A2pDm=QE$LjT1$&{Hd=-{E zgdJIRQs=R^^k{>RhPl3aitCBPUximW!1JO6e@!^2@=onnQg`T=>X+po=9Q8Kh&ACX zFUI(_DXtyzj*9xrJ-bU~D>{3)%Ry{BDw?_yUq@aQ%(1P32h_19KX z0v^DstauO6O8vKRM|0u!mZIF&Qm{Cuv3MKtyaaWYq@CfvZACjk;l@0PhWuRw0nrg02b+#`Cdr3%YNEO(ru+--Y{8b1ZpsR@L zt;w!v?6rA=SYCqa?$JXSC}t=t0-(U z7lip*{ntUTDyWUYnerJ6Xo4ZUvQ@0Kt?^UqDl(KbjJDj7_iGnWwmOahx640k7TV>g z)j^B&wZx#HF!-pm1;>5-7AV(b^7!i^lF4%ut-(NTTj+7uU{mjCTfbA#1&0{DVRIs! zrc1kh!C!LD>+shWqPik|H@Zw13d4d4^VSf14K=xvJHzG-`e;kq+28-gj(6>K1|7gb8t5tjbt2EbqLyhpPDTuiqq(sH!-zh{=Kus~-%j+7* zvi1BPDY>p;Jw<23HN!ZET+<>RW*-d|=hr{U#00D18n5(<#cR21764vR*k#431rzRg zRbSg{3&&r5!DiFpQo-Ik68`S50TjMfvgK9_O8MP8Rm&+)AH~wbRe`;n0A+gP*M^Mt z>}>hs{Cx+$PN=-z&!o{`~4+po*4fz_5> zIlvc*CM$4Biw%l1tzC;=j7<|D?{sKv8dti38S1>CE!QQPOX#Vv*asyIlqFrPg|lzx zKkyM9&iy()CL<>`CoL{5B{m70XcSfnkt`=-w?u6==9>@|Euk9kq?oi{fAag&2hM%} z;>VfLv}mmvVXP4BW3G?9Rw`8p7O?ZM&lqU90$DZB+HJ(L1nz7(VO#Au)~SruC@sK8 z7{iq$Fv1e;tymATg0NP{C)H2*fA#U#a?u_NOO06nBri-uZz>Zvbaq>-0*}m*wObAi zw%)$iedlJd_TFG!4KYOV_kQWXgF>RUuwl$YzH5F$z6=xGx zBkm~K(^|N*>B^30m$o(Ln@o$Xl!>2Q%C5UCiRik#oe%SO)#e`}9$r2BxafRq^|kqq z)i}1+2_3LdV8(Iw{HxcvlrpjUSYmm>iwAQA7S5Ro)v#R zwkyuh05LSy>qQctYn?}?w4V*q`!-|vYq)HW74e|}qG+oOH=RDAUo1XM7j2bRtGdGY zz$|jennt$&0@;$xAt=)T%61;MAgSBedah#33eGC4b^k{~ilu)?NUmnQjYr#W zn==#i0Ln38Ra)BzLj`Sz$X$zIOlh*UFS)*814>Pj&&o0S0_=a2*=O>~hi7C6nr7GO zt3p&j+PVK}JKmBoU%;380)w0B^erPtOxAkAcK81Gvg0dP^mv-N8rM4&JhSp zs#ysy$z-7$zGDVACI76>S8iW#3q4^ey! 
z*GFl~lIzv|ejaOm2&q+&BsL&RV%X5M&EzG^!>mZxxNYaPVlTi25% z5@lEIRq*{`2HtJdTh;?aIHuc&V4T{ju8!9mE^8;3CjS@Q8pe8c!)oq6+UinAS4832 zfH3?uR8@-Y)eCeP1AzKw8iuxpyz+%&(BWLL#Q%!dCI=`B_B37A1^fnFHb*S0lB`a= zv5=cO|IU!tvXoY%G>-_-@UN?Xet!4)&g7j5kqLraaS3$JCPzmRX;HBnacNO;snM|n z6B!7UOOYEQ{?8}>@c7=N|M>V9CX^ZSvK6WuW*%kC*Zyk!*Z$F1;Tv??lHT@7V%t^A zkk{k6l6m&nfhbZI2bKu_Ixku*V0z{|7FNSES;;x+TQbw*(^Fzo(xMYnqY_dh;)o=% zx{9>P3uM$~?1E1IWlY0Ae)38AmEw2HuV+G&oX4;pg%!gY*9Z4FMl7Y(5m>FUOOnQB ztl{qAPXI0-yNQ|Qrb9sK_c8f2RRcetEP%@wq*E14%;~E3VI7Wk2EWd0kL6A`(?ONk zE(@(p!SdMt^Xiid{`&0LC8ZJI4a@vNE^hqNX1e+st8aJTx!w2Rez3NR79?yp2=jk# zE!so07w;rGigG%Ow|A6fx0hy7?QJf~kfLy57QDE%c!%*{^3P04q8EwMFs>a0-NE9> zk(WgxniMoF_)*?Y;_>A}wdeO$e!cg3N&Z52U!<}k9c}Hveqve1DEOM^ znLe?vB&^^IKm)I)c?Pqj9VtAt7s>F~F;?TPVa(CUCaCHyM+J!Grg|^i*6=s{4Azn4 zRc}q8CB;@CQd^DI@YfwPxG2om^#;nwLfU-RVrU$bNoo*6UDtbUfwNXM4zC^CN<$Xg z+7&37C|fxUPc03L50CsWcaKfB4h+rtAJV^-@B+knB8$ z!q#U*C09QIeb=tW&6oyPyjB`+WZdCzIINo#-23#_7BC*FYBk*D@a41eFqBoqYxh_~ zWIJX?@mKZU*5mA0v+CGg9bw%!WxA|2{_8le`ErL|87UF9LF3DmXm(_BF?60_VA`+$;gg5A?!SMrO1c~B&BcUEid97wa1C~4>x+sF zVLetWOw-FPGr|B}lZcHy?&;Vuh-pd^FB|NiR0!5F&5FNvynC!xnSB)Y z;FagHNf07F@xm(yukT>8*G6^tCDJlQUL*Bf-}u3gH+MEw%jChnhj)W@m4W+Zy?65i zck=q~UI^a12&xhSz9qZZX)Vm9&P%iwlZ(zF9W5E@!kukJyCBF!Q^C##QqcK3gaYU7 zA{z2?pIzQwe|ZP-ByX3-4&+!w-Gyz}j%Hr}Ci`LG<;muHoRsEzLSE;u^c+dsQ4a#? ze(sdE-(G}LLu?Ip9p;8crh;O4OFYg*o;SbbSwnZZng)381$=p~A{gZ%^N3NGWH1UdCwMkGaBxl^89>?m0q*p6=9OVTe<{8jkMOj*ZQ>@CLJSFeC;cdn_n?78eW zW4{wIpIi$;e_zLC^jY0Dq~X_Wuh`NkXIV;PL*8##F3}?Fp|J(9ATvDp@;~4G*WKcK zsTnm6dI0VHzvnypkc?GbdM=BUQ#gQ2S?`NNG_|4z_Q&&^(2kQ1ibI7F_ z)x9`8$82A>##ZMIO&3d2moWT=5i1=QL5M-H<;T3O%zUwRHEfi1HTXN%FI&RDxUu8% zdwR@&y!+R_r`;Jb>FLpFS@CHJ8)DwkAZ#v@ zy+_PT`N;*JJgdJ}-Tk0CP*>AeR~&dy*mwV8@4XAb`)5NB&UF-RCpwFFc9rZh7TjLC zL)wu_wo?Uep((g&Ir5dX8Yt*pLTyfrT`E7NuL|DZ`tlQT-H!OP` zo=G<3_3=0SrtGUxB%GI^P^)q&?e^8WupJ!EeBtQ|yP~jyWoznq-fwMYMe!my{cZ%O zj;XPxwxMTDgAI*Cjn7A#n#WpNC);|b+WY1@2NrvVDb`!V5t0K=C>ifznFx&d%Jf%S z|5fH|$Qzc`u@$D3eSQ9GGY8pP&e`iol2#+bU!TSzn&P7EXs_o2%}o-{T&~p!3p*@v z#Nwt9Va}kPN;cAt47R4bMZ4wnS32R>@Y=Qxj~pX&n5!Tw?95AI>;3EVsIXn1!1_BL z{*u&%HLq2QJ=_%JweDSy)rupQ?b{Z#727Kfn|AfKTW=1B=}jp3D^^@9>XbSoeE zBrR(lp7ZxHnjN1h1E5?{fj?_6;u{pXly)1j^~0*b-jc~x|MlktIty4+_C=mY*ekTH z{mJY9^$o9Rlv}fC^YK@%>iSFOQmj|wmonmZ@b$mF{*UWlBGvl(-qSM{X-{=VP zv?)>$Gr(e$x^#1kD{JY7}yNTiVlOwC+mcj5|m4KgDkRb;Ll^;P-`0%_^+_43S6R1aZEq@dz8rur(O$BT zXe-$bB4dh$HCA)c9s+b0%Z~h9k!D@aZMfvPFg})eL<7OZlZ(6V9N${B`>W>S(%1cC ztIWtl{)EGG@1SsAwR*;++QtjYghBOxHs3<4{Dxzz>90KfvkXF_whM8F3&V?NLsTKX z=pUgd>?xc@b7cr?TNp0Y*tveuU-`((&(GzVEyFzE{87PKM^{z7wng zhPeW}tTw5l>W=VDmno9G?h92)vn7w@_2-(dNo*$#J&l>lqEU#Xnrvtd*I}_bW*GDR zzwlo@((u<)SP8ea$67Ah&grVvh`I;I9SVnehFIV3O8>x{k_W?Y(<7hArN9W^uLJ`ShlM*^FwOJrC}}#$)M+d* z8N}v;yc?S*K1?hShQIuqc(5yNeeC;sD!H7_sEeRhZ^1{v@gruElgo~)3e;jU$xQ=a zj3^!cDi&jkF6n4LOyysSsn7J3eCzw!ap`wxu*L=8Wx!r5iJoa}Y+lK9$cw4j6bK)cIp}$m? 
zjqf`eYrI>%IDAwEe{JpLK6y{OrEftMYgUieEHp1q76V>iFh2>8`3l>$tiI*F4MV-H z{#Dmwvh=)~yF60y``E7@sG;9iMzRGWL38aAh#GI2c+Q>R%igBJ8?k zDim7@^S^K9FcFlhriDwNT>!%sDO~zI(lJe;_CLP;_qxj3+=LwF7w2rrNRLWPiik>% zj5hw8vNf(eXsY?>0sLK4PqMUdQJEC;5A}{M4$t{n(^2185JXY$$o)Ne1C|7kp8*6<`%q_y5!@;Y0cbJc* zw^h0-w$gklqiFBpCQs8zoIJ9dr_~hyH8#O)mt_SOG4%C)) z)#L|mT<*Vfb?9!%;GN>uvQ(nIbSq&f+*Pr=vwRm6SW?iMb2k3lUb45dY=39@9?HRu zSLqbaX)44uOOnzUXEhh>1b?4h&Utz z6$o}S2Za%nB{tAx>zFUP3k^*gbFd$V%i`(`l1ey?Gd=Sf+VX!V|A5{DSv*@V+Mdhy z?e%->>KSp=)VF}cri2LIs~&oId!oL6uDN-x zrHhzv?OtpT%yo8C>t71>p?hEK?_U~VxS@7VX)4~W0Y+wCG4ogTB;=(<{TQasvWcIr z%Th&I2E>NHrnWNXYnkNlgyc#Hd{530OpbUrHT8aamH=x1HaL>&uoi`(u0oS7l!zIh z<$!ril{o`j!sCNNIh`A%I#1%DC01JQHONZ9aLva8rGaZ1W{0!$266IS`TXs*NVY|2}WZyNe|_$!cvk<|Nn0D3`RxdO4Ns`Po`&hO_J zENtJa9rH$%ka}BtE~D@WuqpUDer;V^3SL&-7C;xIUgNoz@rGTB_DQ+0%ShWj;dNeH z8z>6%;(wX()-!XzQd&V(4`G$ede&qrS(h;imPWQ??2EP^gRRYYG4N=ES9gp$q}h+b z{ob7Gns{LmYBeb>NURU3|9W*p=eiF}aP4JX{jaM(@S{DtYhPA;a^{vq`ey-Ov6|GH zDJf_HEak$cg*Hd%#F%)TI%58~Dem*A%*eDuTlbwkc=Y_?Z!R1^RCwWJMbWuiRRygN z?}b_#raA)D^4^B#K8~(@9DnoE)cYUjzau^@e*4|>_Y5w4J^y-TW^r+1c4>U(#pE0V z;z?qYcGlz`-I1Dt?&m^>G|PDkiTR}Buc$uy8N9pv|9F5;j>)ZMFrlL_2A^0mva!FYH{Y*k~qvws{lzK_m8Ffi&zSV49QY+}X8Ie}mfgY#Nd)T{qavtxN|4lvEL zfLFhC2WyM238>VpW56j4(K(0}sv@!`qJ0mm8JRA`@Op7$cEJJQI9u-?fndjob;m@i?p(!MeDHD2RCDcI z`?L9uhQ-d7l|aX8e~5Sy9DLb7O!h-!VB|&Lz!ForgQLFsOWv2RBCIR<<>)li&|Xf= z5{wY_wau={NPB*r)fE6LA=yPM5JPYXAvn{^Fk;I*3-g_txA=g+$ApQpJ zvK=W57wt4b`d3e)by#zu)G5IBs_aq7u9#r;ah@Z23OmEvR_^32DE=DrwPOs0jg`Aq zgnl?KQ>~mgTzMs9m8Rc)Kl9$?#B2?beYPvQXs5Uqu@+1ji)Q+LFc9M>z?4}LJfsNpPIJC*EcWf@#X zfdJqt@`?}|MV4x@^d55(cFr4)F6jjst9ab#+CU~Fzsgi=y&9+>1RN6wJ{xc~g&&liuL z%uYzymYA587@xK|+VD3qawCClwM5kC@f-e-$ROaDPZKwPN&KHr{&8Ds&JVACcs2bB zA_JOR^U?a|UlU&mdGwG@@D6$_4P0T9?#%_-^P&{%Dw9C_TStC9{_E-jo_t#K$!p#k z4oTH9GCU*Ag38x3+nK#=wq%$c@~Y>CQE$D|ed*q-74r?7W)Ao&{w_f=I&MF1x_-a! z{vD$KUTNR$qQLD7eRnVQJ&@=w$tJo=GYOQ>T@^WiujsF6M?x1}w7;YHKzs3_j*`Qj zfG<({6V8^=yO$txJBf5xSLsUI6>F~6b;tc76N7f@EWXVXQ` z^i259lk0pr)VG-uu+U!X`IW|AZ8!=F_=-R)G-4q-F1J4h{-TX1ym=o}vJU*ep)hD= zj+Ej0B8~)h?RaC{hQE@OCI)1wX($XF%fWiTr}me{CpfMp8xciAgxPusyAFJpLezQ; ze+71Zv=t($tk%DW9bgy0wX?Zwx#pTo!)!ToYA^FM=Q&YQIvv+xyi~KAJaIuwMD?l4T zR$O+-6{g7b!B>Doxt?L4!wbE~VU}ki%}qmUr$&bNktkNZmN{eCIbAl`^%S~Lixs;C{u>{?B`zvHF(!dw zLzIT&$??V~ZB0t!L0C`W)e#?)z9l9zDQ;U@^6r$xopGDDZHeEWuw_Sl-1dy*U1^Cs zGE;JMGIk%_dF=Rsv*(Xq%|BIEdZyym)w^x?ntGpf4LwITIYhE?E-(p4zee>#nzhAw0VCSxk1tHV7*puO6;yrfcLcqqwdsRF+oeM^vtMwjI&ir3}} z8OBNSS@l;cM7Q={Ab_7n_?By{KcR77mIF5VY ztZ`xJ@Y9}?@T*<&JnyAmugcOsPet0hX(Ydc4 zpWplB;*onN_ue`4P4|t87d@dj{UfjXXJqgfzAUob$8w2hI0Ra1TaU zgr}!k!(7u=d0TBtQjRQSSqv&=UXmdu{~(flX3{00RbVDl&ktg9HA*mXfQ12D*%`p8 zLu6V6cnVg6Vv@EE$&5mmzqM(DI?2coQx2o^cX&c-#KU96@(7Kz0+F1Me6P=BK}&A z!FQ0g(3|1@A7>{&PWJRZD6hMe`{c@Q)=`hnr`MfLeR?UA`T}8&D=6+*ZZ&wrqh)?a z>E4#IeXZq3n@SEhm7REAdc3Llo5sSUEyX9>%g(eEpC&pgE_YVu1#cD)->)30yFOf7 zHTtl6{Bg}h!`u>z(2L+OZ%WX4 z0CTOemC+$$d3^N6#MsLC6hVoZz#CAD$ijjxyvnkron5xsF6IU?vJI;SdRJ_pI(fB4 zmn()5O99$S!;;ssQN|~EifHFfNnuwkB4?oRLVkHwJOz-=bJ;7j`wfEq zWB60}+5kuej&18LR76Gmx5#TN1gE>!$%Dp!4LA9;{nd!Q5_`XF-HXw>fP9tXGZK?? 
zlpAbC6wYw@D1_K*@0j<>EmNTc-({)=d1X1FZ=4-(WhKdHhrgO$wkZs*ahTcDI+jbe zipW+6VGD4Z`pfjse&x}y0M+#We*8CF{%Af*i%L$4NWfJxF)B_XI)-3)u0+fR<{!~B zOQc4{r$ok4JS8X!YhSHQ_e(@C&5WeDXd)qUGm#vVOyGcl6-H`YJgygsF&pvGVwyv8 zd_*!w#>db{9K9(jAuc5)F(Wx{YkX8ndR!)v6Tdq%CO0c4H#2(sj=0_XQVt$SJACe| ziv>rDOHWqbyL#{WjpscN+XotYnWDmFE!gn36SnBTjOHWVxrQ>qDO8@iHMZg zw3$ebaDgt1mlykGB+b7^_Z|G<&G++Tv({_uJ3ljg&{SAz*tldvNR5+v1y%!JHBuh^ z<#c<)*>ydP>JJHZ1wda;EaOf_$d2>k5Z4q_%^_Te(z?eYhaQ*KB>Ix<$na`@_&4jL zFfWqihLwYLM{Tk!gRTCVhQG#q*}fQ^;rGwwx3i|M<3?@heogS+jnKV<-rISh`xp8j zTpX;sOaQFFYiH@c4$8abd%G+5Q~#CXZ&6lz3AyOp&a&;`vIV@jqd2p@Xlq+x8o}TO z?h)`Mip8~q#^I)`Y0vUg8w=8z+5iNTk!~oE*}+dPZlw_X=v?Ogld0^`vP3+&yyx+y z-A^x_>#8W3ZRuSR&O0W$XllVdfb66n;WHO}=Vx||3yVResG28j5HMQ`yEAGbU@)9P@LlcBmLXE@vTLWePrlF((tB=3>K%T{QB&{6`9X96; z84)-Fq`pwCg)S1NL0)1pI7K=gG`ER??q!F@IQZSz=!c2%AIF0e&+fOE9%(K-*m8A8 zb3yKNG8pI6C?Y(&l*RWbU*#3tzcU$zMLfNf*31ZxLJ}sqV&tL_+*I;abIJae(gSTJ z2U?2uH5H(r=LdGUv-Ios!XvE(hug27=qx(bQFyYg;CN@r$k2*}D(`P5 zCeZ!992t5wI{a#UWOZ_k`NuzAw!U=JgZ_mU$Ttf;l-OKgOvis>iR+*5O*M05Qj+>;L=l zUwa-k<|HH%NQKiNwj`F(S;a(PSm8XAV`Q9xj9rLjsGqQ70FH^h3slh^D;^i>;2|a+n`6^9N77JC`>jMQHRUhT;x-aiUrvvUB0%5t%@G-Kkpvnr zB0D~j$c#$JjZfRYB`YUBb6Z??PW-k#2|M;C?b@4=OB_hvaV&G+`TfTaXYJVj{qArj}@bStgtlDM(R58?SqNT#?W3Cm8+1j_` zVW{Y$rDfae=jC`!aY#d%SO1k~@p_j`wYBcd`V<1hWISF?NZDE{*IETO&6QnkxbnOl znfZC)^<4KrYjt(^ojbvMReg7__20h=1P|3-8hw;MR#!OkprE__fUw$Olx%Cm5ldQwbId!7dMW$eyj? z$n?)bKRw=R_xMoj@4$T2~?wwht4 z5r9By^*0G>UyjAR!a0h$e&>;&TXwJC<+{~9BPiyX*jkF_S!WBr8tD6WV&vZ!`&YUj zwN~t`yOhETs--xK97tz7+l0M}ubydD96X6;NToS6$@? z(CpE$W^*m^SmP$4I|n*S4|bLv0+-v04g$@+l}9^E4n94T({^QFp!5jQReGkk@haXjrHQXJkzdhDiH{1SfA<(ka*T#0HvvHxP?M1Nb&A`BFfB%b6 z5X!tVGVp3_XnA;mSQ;5$9-ZKyf^NR)aIPCe>y~bI5BgOZF$b-)f_#zv3PZ=}-=@8hc1B zMA$=8*V?>BFx7nvr4~feh(cF8BqPQRa?d^a(=Iy$sE(!Vm$yU-VSJu-yTg?!G6 zp5YAIuozg^xk$o@-Eu;;bJpt}j=Yxl{xJE5csKc$zzB=rOP8;nMHlID-S;|w^``o; znG`|npeL97)U+@obz4I0^lE1DuRs6g`QvAJK4W`DB{y|*VoI#+7|y1~(uE|Azyg#2 zsA{1FPh%sH6MOjkhlHr#Z;ALr5?CDlr-aB)6E^)ZDe^OExsCcVfeC<7u;Y!^1SD3{ zD7%nVGtfc1jM)OaX|bDRXm89WMhB+HZrB>PAv1m>H&Y_NOpo3SjGB`b!e;8!nK5bU zk;!Q>X{phvO0g3&5|UFkZ%K{Xuyu1}demnYX_0?Q+VnY*yeUdT`ETNec!A*Pq?j*a ze)Y*e-oJMDhnF9y@mZA(ZYW~QVK*Ggz*kh4E@X0nxrdqO2DnKW@>=HE{wl4zHi$vC zFQnmG`LCCSuE=5~D-)Qjq0L8f(x|h-dycICcbfT1edE);5<+zaY6V;EuO28X+S-^T zh28MZ-}T5VKulg*NUr*Fz!N1Pj1n^v+6Bz3B1j;E!WK#}H4#lN8u8TtC18|VGRBB7 z9;yjmYOK;>M0Q$2JM2UdC<^B_wUvj_BD=C*uiCV_-g#4PdB=9#mUkrraj*osj7M$y7=&U(+``iwuNiugsOjnAf2GMqPI}i*7Zn#V- zA1Ck~GLPem1k*PdmDN(MC<(3ujm}kCPBG)+Q65AJD09>B$j4TQq{o6KS8T|; z2BRRXIafHhOljja?&&{@9neakR_nZmhNh9Wt(^fZqx>zfrPXZ+zTvX&YUcIC_{*`;mC+IG zbdX{$4-dgFsPfwUDbGCrk3N%N_bfhibTwUAerrslYX{H(4Ktr~c&E^=C8pCco6$3;$0TLMCTGN?ZjDVdpR4pOS!wZ^iJOzQ#3Yc)Ceq_0fpri( zHTnyHo8W{*VnkF@WONc1UPMF^ksO^7@oDU@KKXS^UF-L(k`Q%-xH>@w6@RTls`_Wo zCkn$T7gqDN3^N}iHFOKWP9Rt;u;~9K!{DmJoPmefJ;re1urOJsDnL$Oj4cy(2F_;G zSWY{!KyZ_Nuc|A$FAJVso``jV3AFS{xyM6W$v@kXuIy{oYH#v_m_Mse7e(#26O`1y zn`obGs(Rdgx4G|r4fS6H&&ZyK?v)aw4YQcQ5 z*ta}0MPY&F5{+V5j0@z@1o>wRvd~RebHLn2I)d|aVNZ=0w>6wke{wqc*|{`!9-mHq zawe_*BG+>%CR)(7RC{V0EV%x{v1b>*X)Y-rd-&uvb<*BpnP!-ckHb%hkgTm-msr?kGFj zUV0FOr7ql9w7a>K)tZ=%%e)V8m~&x_P{}`e;{$L{HWE zz8jaiuU{nk?&J?YEE#)JNetb;Ir-$l%=72cE!Eb&&=J6soOltUvK)FnEU`Kgd^OZd zE9J|f{#QfeuZJgaFCk3#wK_IUpqkc#GTJRa7JywtXWM$EVH$^lTA9%Fc5dd~{Hz5K z{ALEkk-vfwpqPiYvg6odxPztDTP-)M`e#@qLEhC;uOOyZ7Z#3bVdn|oT1S#0p!27a z;l(1nF~n5pbTI8fo0w2(sK6T?H3V4_=8K$JfLl=&Jk@-%p#1v4R$3ab!oin)Z4Lr1 z*QucdF0CT0>jZF9KGiAob(5r>Nj}R0X>7vQMX2e}wA*XYw}6MU5m;$Ba_>=BCJu$Y z%09%_S##JZVdvB#qZY!IT6`2zbeLFuH~yMY|ZG6HI52=2KL-J~Vtk z_u+qj{GV%=ujQuas`Il{Z~Y+KoIn;jCc>NF#}r+Pypa;p4jko&-5)Q 
z7RZi`;b2nMW`&U$5rYT{z)guEBQ2v0#bV3lileC2()T3Hno=@B$F5|pvHOUn?(7Gp zYII@*I~1cORI{BH6GJeb$w{~)#r9b$)EYJgDUsITXu;nNzmEccyPx%Zx9|>51ZNR` zp`Axo`Sp+&{S{-6n3XIrYFouqQCRtSM|Y1QuWcPrYSnd_K4R}kW3q<70>N^>E{Db! zOppxWHID4@Vr_U-G`B7!zqbhJw6I*aJCB}C!Iy>B8U%AB>j!_U50v-9W94@;v4T%v z+pPy3cj_Ge-mmB-v!m=Q1bC3#j#7~H z5Lsn3^SF|>6zpp&JYY3n0Qz~c_nRr%&1%g#s+An*C_B_$aY*s^ut))m_W-_q6^DDv z5A~LP-B*4DQcNJc?*N_44|P<2-Bs~TXZf+N(yx2UkM>?WOt5HV=@=+G*iraZPYI7Y z)?Eg69}Qfy_`0|1NMH5o;EglE8&`?G+f~DLw@2#lk2TazG(VVZshw%Bo9%eG(B1H& zuX!cZO}ri$dNVY#IyClbU}R-rm{w?;0!f7(@f&SojDKRHllgkF^V;Unsrs7wpoDfV zv!zf7;0o-{PW#~@wNnI%F37mBsgc)NB1?x3FfD}C!(1OD$pwT>4nG8%yTOj&k3 zlPmqxn%$Gr9%Dy2DUhnDDy-IU*=_aNsUQ=)5(#~UmhVON}KRCH3MaSSorS0 zfB5hGZ}Q3C(mjMYDtU8+7f}>^q2iFJcDuVY*LwAe&@0E-^tYi>USH)LYYKZM~@lFsnP`M9Dvx%8iR0A&W zw5(un6<5N89Tj`p%XfEOW4p7hY!_Ex5|uKYL4;VjB&Y>{r39SPSePk6lKDywsQUP9 zw!tg=M19_Nf(w4bl^tX(AD`d$;A9GXv1HpzcN*GALA89!CZBu)PsZTntY{!H_YwX$nRokVIes0g)-uh&H~0cT=LI4 zDM?$6V7`P?ME8#@hKB8ItnswlSEli<1Q%cQExZWM5&$l=)LgJU7gm`YXkna}1FZd4 z4i2UumZBUvu9kr5K+~>r-**}TmuWKOy=F=skLV^@$POc{7Rx|*YjdmQ)oq2S1SHyO$EE2U%~oYMs#8#1H<&hzl9XO041Zx>T%3WGpH_bS?!|}Wdyl2ZCub*Tm@9?3T|qpB zy{cOjJXa`WWIW+KqNDM3jNY<|qHu)$)u*?hu$x^dqG&kmXI3T_RMkFQv1N|j0%@fn zD-z|%O;|Bgx|OjAV7eBOT`4){PsqBq#C$GcO6;h3M*1d4CK73pj5penz9}wk<7Ogt zLo7og7!_gJY?9=fA&*RqjBt7|#c0z)r$wZE{ttgj_&lCQOVaqW!D)7U&6Nklg2Kjv z&5Oe?2eV_W)8n4zZXx)~lt38~sJUfh$A+qg)W&Ts3vFE3-y-L*Y_lC1&VSc&YFE#- zs_x%z*U;a-Vz-1CUNH+VdZygF&>TB<_wqwRAM~`nt@<|6bGIs3TQgKw9lBRJct>LR ze)-6QijFcU%uY(WI8O$y?dq=B(NVsmr5KTw&LS+@1^z;fxirB z;ra9jCljPo_)-S*l^7^Ylgq=4Ul9*3op@S!zV}}3OiMe)hA%@Cq;nSnqj*&Mvw+nO z%k$z;J7_bGwkohOVsguluJ#KkD;7)5m%{Es-}JihL|M}CtA8S8O-k9a&4$0GTmph6 zbW+k%&@;8*UP>fE3HY)>uxy1Ci-V=X_fkm9!68X0 z8+~T$Re>#Qttf2Mg!|`ZY^YN^OCuG+l@C)B-%brrbv$pZx^TbX=%Xw9h-Z0U)t%jb z?_}1~OS_wj4?Ksu7VQ=3av7aOdr1ycrEM#p1z0O_tB zAlT`>euym+dijx*&^f-(SM-U%&oO0@V1b?lFbeqgRDEp$9K)l5)ZS~y1C`(OR2(A! 
zUrNJ)YhbhlD?s}lvQ?!1y6@VNzH7(8Yx33fs&|)(luJ&lAy1(X;b0%xdn-;6fr|4z z<>$J~&-GPZ3fv%HeZK3)h3=Y*J=K>wubuC@cB$ujUhjiTAxgJJev=+5KF;sG(szQjD&{X56-;ppIYsoA)F#^1lBx39h0D- z`D%O&_>lf(pk!j+^w>6I!g`P)ZXYHV%__uLrx|c5+<5)Vj?-xtSJBpRCwvQXDFqfG zmMNdT`tq{$Nlh(%6k(OqarvNZm@e?@qp)#a+ba5dcn9PGe2(x6;5w8POk9t=Hrha# zwV}4LPRj;^fM&-*;niWWX2orhz^5VNH0@$W)ct;X`orYZ`-w?DJ!%2GNU2;~@3yN) zuPqFC7Eo1yRUvq7YdEXi*=7z}ZrK>FJ;Zeft2emSeC@B0Vrl->YNH`9Tcu0Fl_)#@ z`OQBsO)l-t*}F9!iDGlkdHN12AXJfUh#d6tT#c`K6j)-Kw61Xf?TCFlu9VU{Z0piaSqy9*k`v>?7 z8IRc%D;>u=)z`cw(>6sChU4bhWh)0rx-1^-c_Sg$HUeowN`%1l*2u&!{{Hu=n-a;) z(+7mR0$D-iE2e)I3rY>>VYmyHVVNParl0{kLqIF3%RXxxKe$O=8a#EdiyT`0x9tGF z7YqM*s}2}(_E1>z&uU%eYP_6{5M!zJ@~LH^q?Yir;Hf9Upe&PT2I-|ZW*|c(NUJ!b8TCo zDn~@lr6PWofC^n@K+-Pg19Dc0vi%z9rS~wnfgCj*!9;i2_MY-wwjIUUt;Ji3meS0& z@@#|S`hvvT3o%dgwlowZH54WjjfEKk(CDR+SYO-mtRSuSe8Q8w)F+qIp5~=L&eyn- z`IuIht6x1X*z+*|=##6*yKh&`wA8;I3=+$s{)Jv~#}Y>@H~C0AjCyg?5<@N1VgbaU zFi1*L?}6#`Ek@ zS`Ub!*ut{)iS&83a~Z!W;&mwa?dZslqy2AsJMbO7eRAi$bJVB( z;`-tC$^(EY0hoe=+SInsGA;Hi>MTD(bXOjufZJVtM55+M$Mr*Yw!n>(J=G@yRmb^j zSCxbTG{q4qk(BS3>TuaXa@2iQCxX{chH6ez1%@3{YwiZ7uOI2Tc2oks%!@1swZ9hd z)nv77m0$N(enSBCz z^>4VptLjYWwX;M|^*N&VZvNoI;-SaoBTp;GpWPv5nw~7QH7sZ?g^ zRzr1MIxUeVBMrpE^9s9;!%`hy17+1<1HNIQ=%xxT!#hJ>w^d|!sH%u;$ZK1WRPa>q zU$=FIV8dM5TFqC;=IkoLjy=|$#{oTa5MKIN3?f^;n)QIbFkL68+cy2qynaN6d}yu^ zuk)Y{G5vG$k{teqsha(JzZ%2_*U@8m=fKfNL-;RY zpSfYKRRPv@@2;y0hzp0Ul)S|C8nXLX%np4Jgt24iA*li|5pj*XwqIFvksBXbBn)tE z>tV5gvSYzR^AvyI3{G|427j-2+^p_l>|kw0=wW%VuB;!~)!m}O2j!!Ws>EKSnEKuJ zj?&z&itIqu_KxyxOl0UP+0k9P6AKQCzpVw?>~~e{gV#VPgj1A%-Bo(1y%;YnPw!JVA;VmC8joHNt9-5WI~4<@)I7Oi>|*I)pRAUsc_5ltMShZQ<{oWL1$o^!+_&w zc`)B}^R5JfHMVloGSyG>_CLO~_hH^QPp_V8tSp>pdW?3Lunt!1>NWpowkpR)Z;nj=5ae;EfUjSW_+niRbx|mMT%4A`^$*cin8KCaUYp;#QtUea3KHhg7 zFr^|)Mq2nXkg6p51TU4QV-#FH@L$2-{Tc`LRKkc!T=PI`D-zX5hyY4#O>_%_-#8km zIo5aMC=sld2;Dfr)7kk$l_z^jj<#Jnz_z#Iba&~AzN)-|nyaCkWklb-iou7~qtEVO z$l3p}X1wtMOUL>4#`(6krOr;8q&|#{yc?aQ#Oz{tWR)12Al?kktPV~yYmz*+`EcU* zh5E?Cd4K7|Mw($MF1u>7;qnx|noc2XNEE(>mJ(JQg;Xl}l@2CDI?vTglh}^TnlM{* zV&MoIeolvXdSK&?osT)dfGc|E!R%c-lD1|RZ_x{f!*ZD&yu44D4`tI6x7CisnRAz zkPW7%Dbl%oCI(}<(>ZhOl?$Y7QKd<~VGnHm(7#Wly ziDaG~Fe&WHzH(S&pf-zvtuajxDA!!AG#zcO%Q6Qc{4-}2=#1%)lkZOCi7K2#hC?9D^t#n>VN_!5%(CRBxGui0^DdpnN#%4lXoWIC9L7rmpM zw;#7vU+bv39=KQ0d%rSRR~~v;!M3lqtiQGrI^0>fpMdAWXK}v5|EjZ$%PbQKB>>Z= zybSOcIHo$>T`oX7lcAmho^txVbrKb6=jK!w{Bo8Ij<{tTz%}5F^ECm-R{<0+tEx|ZK#?Y$^wCS>} zk}ITD1GI%u1F4urpps%Mt`SNFjU$ud zRtjuX8aB;aK;qS2ek_1w>&{u2Z}7$mPOd-?)+-8EL%k0KsVrCQrb{bO{uP{>GXk0uw;m)HA zHrSB?QWn&mEt~D-QQe3EZmy+jg|v!($p+wB4b<*295ivY39F=4ie_g`9XpiNx~)TA zOiKLwBaniNUCd;DRO2F{f%Q>4K+qR+*Gd}L};dXv?Z>E6@2Mns};;b@` z+6_ChCH%pl`6R?9*0eb40IsUq9MrWuf&bncaX+J+Ssx2WgoSH zOH);Oxn*$n3p+-LHYZeXD+U`SZj9a%5li4R$!XDVi+PA*F{x}}zG6WcY4Z~?_)3dO z*}6INcc1*e;&RoGul||l&^cN#MrMf@qjQi-J{b_oXP3YkE0#6>t5f`Z@MTA+D~^#P zD>!6^=lR*UvOC*%<8R)Mt#SYwb=rY7mc`^sHFEa7n4gCCW$7S7_3<$LwSOPBxciEb zl2B&=6HXiEy3B@GBevH}+T0XeHs4&qSKc7O3*gS_mVwS&&swU=+HX|$-YxCBSB5>l zMse^#QSdEM$O3$^_`o^| z9}fl{HWX#m7iEFG07U)OY)ZrRdD#qcczh}S$;EUk!W51vA0vu>a60SmiA+A;csUJUolQ-B#rkm4_drduBKysMyY}blp!`g&|7hW z0ON?>YbU`wz)Pa)8%nW!`=Df~=5X-Fq2P_L`>GH3T|WZH6yT%ai!!4BR{vynU?q<`J-%0C2l14>c9N1@&cueRy&3dVZ+706USMYgf7}^E)dq5nb0Vx0hdNExy=U zp5IY=h3KoU8oG09;C}6J-J{_r562sx!kVX>pU$?`FLgJqhI*K1u@nfr=<8by4880h ze>=RmIylcS1kC_s8Olt<)avjwiCJ#al(*i+};7ZG5FaG-7zjrr`Y>nHKxM_Pz)XvnXOak(oO7pLF7&&e$ zRvRED=oSl#nL8z8f*Es=8VgdRNsd8PC1E|bjNIK6y?NsXfX|#d(jy~?jHt*oKv`AO z#)u8grNxQla1>ZcY>bQeQc}}OjX_mYc1h}+VG2=S5Duxezlq}|G_wuJ+6mcOz1TF^ zc8v8USrb!wZ2E6L`OUqG`#-$=ajI`}7IDk)jNz|9uw``V1r}E;%>$3VGQ(%|OE!Wq 
zJhu#W1tQI7gYvpS@aP-Dn5-3f9r+!72?V<^6dnh%&5mQ;)-avZI-#{_t)0)6tew1u zUn%m2p{>6vaN=h-@Rvfcgzq8(=E?$NSo)n=0E{#Qy6!eMUn__I_T8f9TfzJV0^}un z+18aIucEvw=2u8nkynWq7G4}Xx{7i;3%7R^ZfnlVU!58|$skqv+vFM_kipfZ4HkIyZ zEZ*Lr(Nvz>Seo;!IGbRyp#-Cp5NH)-V;o(7B^@0#XTVds{#-`w@x=Qll1Z7?o=zj` z&Snu0^AC64xHSCi?nL4Nj~ ziy6jwN$Fc)6!|zWtG*!TY0<7Hg*zLHb`x|Pk&$Lh6?C;-b093h$nLrP(C+H5ssA>i4Jh9otT{sT-8$~GS|zqefjNmA z#{g4<&i-4+EkNPW&7(x{=HcEO2YYJv2W}o1xPP?&-jUv0hr!~(JEuT!)M71g@ak{+ zZk`O?k$^r!lfiL;@*5H$KCoz+XP!<7EZO55LY4`K{?+<2B#C`21h>6@gy5~g;sM;D z`{zTqPYb!ed2;CPX&~NX!E!w3q`HktjrD+4s5N1J+-Kk2<uFA@Jl3l${Gyob4_<&7I`-d#;@zTvnm#BrQ}_y8CaQ8LcfO&`*xm zR?aroEw((FZ*N-eZhP6+`*v`Es*}XP{M*5WcSDP8Uxg+JoaH1yX8~xSdDgA`S?!O` z%1Dc`**D`eYBu6bvrS;E#vrPvQg2CPqn4?bmZb?v4ntHyXQh@p(a*E-H27od<$g8E z?8&qwr>&hw;XS#Ap>Skzb4}%g zh|lBy@SCXL{Xd&N{mtgz{bST0e;c>)Pnble#%)aBvN0_wa%)=bw$zyHk-@^3wc%HcJJ{hS;BGy+pfGphvV~1n zd=wdG(#v0Nrd%5xO@PASulZIPUfZL#nl6^? zv3;SPxTAD*F~Z<}3Wyj<=exhvv$oy@^P|sTJ1E+Y0;@f;_oIkFb{& z7ZJNbt)Z|k7}l|MLtYbd^Wq6LbC8BanUXU!%l|^}gU+^^=S@|m9k;4_?-onv(Y^nl zu=o6K`%bbxXXorZ)9ns++wIVf?I0^rRL+@V&N=6d#2h4wnILjzQXrKjhmJG*%%wlfw`IoEs={d-TyH%HK%d$;8z%Y`L1 zx*^y2_ij_pd=n+;qoYNC1{0;*{g99-FlZ)7{j;(w+G ztqGIX7uLNDT3%+qgTOXyN@NxkEE7yw2rr|HpF~7KYuA{@u>-2mDzSxs4c4uua8216 z_YN}WFB=;_rPFVsfd>nB*Ili5+bbp;uZ-1QoT$$Qti717KRQzr!YF_KPBG6s~omK|4vEgz0va~eEd#rY*eO{9at#TiNmx(s`n&~8D zEPQGT7YbI~q10>@V05SuqRUViaTi701xVeP3sOc7-+{YP&>-%*1EPZkuxfm$oQm;E z$8FL!pl|3n?k)zj~wnJi8MVZup3*YzN=B<D~it2OOuk9m6-p2|#*Y|M;q%--FeyZ?A$VQ*>a)!O0@ zft4R3t4vY7iUa}6EZ(G&FVks27; zy9M(~?3K{m_G{%_;w*@Bbk^?*|Ek~EYQGS=aJo$@{lcHDve#ypoGpR6npy45_Pu`I zc==Q2#m^hh0TCSNcly8ReT1;JfTgT`fUmRsD+X!qAh2h1B(|t;<6pd3YJ_$+45|c{ zMzE0n+bl|FA~-wzD_f96rZJ1O@tO^`00+@u_x`?@da*JUxbIsX>7K4CY$?lYxP7(g z{K>1|vu5GL&pvwh{r7(T+xP$dHy`}!w;#Xr-e5Jcg{N=l!eDlHQhd=y6 zrhY#9GdT7Mqd?yr{nH_mA&-1|h>Yd$M7oMJ7EnwkZG%Ge7oSRT7|Yg|Fs#T`xLDT( zVxpG3ENLziiuL&yG=+J%Jc*;MRS09Z7Js#bdss*Kn=gL%_2<9+^QXTDG=k{}V{gXf zeE3hF1M@#iQ}`>#Q%y~qbdFDc`^Qgz|4}B8dGqKEbH4kVkDO_p6dUUIc}?8K2B(Uv zlfTfeQ#zJrt7zmb|GwHUK|35xkvSuWB4D>6vUL{`vdu>*!<~|Uyb$I{VER_(ktYXh@V^{xGaCCvqN6Q19t3%x(Pb&y}>IA;J z=x9T1yfr@2hTsK!>MBml{@;!q;BP;%-jTc7a&o0PcdhL#Hfe1Qi~JFETmmALC|L_- zpW{9XDO}^oq7aP{8rX8-`IbYo^?$~N1@1K-fr81gr0sjRK1WK>t*5wd^GY;=1x+XC z8g+1q9LuI0^e><1j3)U?z8x()G+rT?syQ@W`~7tFk*TV~(^WYv2yox|Zus^$lNFS^ zZ!fe~2l`qPzOm=aYd^*_faSAKL$Td3T9_`d_JYlYl`Scwe~sfvewOfOTEo&EmQ{uh z3tNbmK8h#dK2IY_!?URDJ5VqlCptWMm}n4^nO^hT*1NR!2DNKr)A-SlYb@D zY!gq`v)&6upM+y#ri~{4mW=-#i@f$PCP#*+YOb@N*G)};42_0dS;5?JLdy7*4(gfw z!u&=Gw}5w5-tD$Cfb1>$D)DG| zpFnot=o-_tQidI%ql(VWhp>bt2X5gLpsoU0LsCj}Qr&g<4!x;V$fequ@*%jG`;IAG zM6My?f%|TO*w9T7AHD@*gSX>Dd5PgW^j>)ld~29G;VDXZ3wVaO>voL$4ClvP1+hVg zOhdg{+cOayNMO~!(Lr8ML42?%Hc%Mp%MbT*=X}6pqeFLiTKHP19~sWe;3atqITb_( zu~p0b9sg=af`q#eaz|q8Po(-XCW)}Gruh4i}XfK>m+`#_aX@QUJUhI2zOl$bzTZ}+yr>P!`-(nb%g5EddH<` zZ(bbz-jm1e3^+69Oc^}XOVhb#!`reo(zfYq-SD+`ahUlC6*Itd5cej#f<0c7$!8g(y?wQ0o!%y#Ui#Y;Bg- zc19+))y)#*)xO0P9l2=qBQ5T z)un}e(b49h-Xi}>0;01PX!uvQ>SQaTSZx_o#WDNjhlg+e_Vlj?W`Y0X^*{der@#N@ z#ShOOK7EkhiLHf}X6}#oyZW2j+pB9UZr{0b_Tri2Cl7yr*xG?Wiqhx#iee zYp$*aR##Q~X=aOzf1OQ*?YK{|V;TRRTWLQFaRZuzu<&Z@Sz%%2UnZNteDjgH#_#Xe z9hs^5{$70!wku1SCaiKFPy+-3&(1ZTLKMs3aMLjWFH60;=_D0Y%tGT8hbJnr3>}`R z`JU3WgiF^PpR6QG;+VJS@JPwg>8eXJ)tB9c*L)>6$EvIDwKOmF4aUc(cIH=J1Z5J0 zYAtCVlyH%*k3(^*OB*lS24R~~GLFu6jiqF5nm3z~701CvqnRh!m#A7R7D}~@W!rCP zENnzA|2SQ!3=B_TxP-=wcl7#xmvaR7ke z9R3aW-BSJ)w=?x&ObaM(k-$SJ;J4uJ9bDP%ah?TSQR zs~MZ2X=)BqWazmJaCwEgF9ka;f>7rbAa*Y$Xt6{}fC)x!8KkmaJmv27uHv=MQsD2b zUTv>j@2Uy))`$BVK(MbN&|AOOTg8~O-(A5T(#RNfu;#?XKxW3Zb8q_L{n>|$_Zgnr 
zTU&b`l!@kNq1fy2#?Ofde_?nkzQg}MKc+TuZUgA}RWf5Kopy;71>9~R@8##M2Z~2O zZ9arRfu(2su(SPUdkaUgENkA93{ONcm)6xa21l#f6)vH5$lVh7t?Fo%Y9dWuu?%_( zN0zhmh2taExK<6FWe@4)aP! zSi8NBk=y#bNj8$pJ{KWhWnM0c|Hb_LD!v!=&CPZDmIvHx!@hNQ9|(IoLT-kSn!?`Z z@JLf+wApsegt~49I~l0Fzzh-0yV9J4lbP!rt^MtUZ;0BZs7OVTO0^WLb!%|A=bAdQ zaN0Nv3S~^G4GLx=>FjFTDJU4zEYrkzp68o#a6!-29>Y+Ltvb-de2R=1J3${r9qJA{ zYN7M&+-c5beWCF$%!~hr*kSxveK905;WXkaLH48akTR4L{-U59nr!48)Gvs{HY(pTa001SRzi41}kfS1(Adz zB~EEo%ErjBFjCaf!qNC{G``2B@UPs>=79Fatnu%D?^v%`D>++vHWjSsjlmpQyewR(jc6c-~ihdc6GnRP~wZ8c7nP z{O&3Kj`zVfUraUOAwNFZd}694_ipP2LGyX6RxClFS!t=uBr_YB^CixaKI2+X2?+F8 z_x=XcWU1C-4u=jRsG(Rz3 z5b+^(^CO-+R>#hs3W{Ui5}fvNAK8^f>{4Ww8ODqZ-$a{7Jh#H`n_>5@kn1M5gokb* zYymF}S|>Nm;zVLYMJZ3296x#sqWwjQp<>b9diJo}olZKXW^Fx_as#Xc%|G#80S(x^ zYWy4Ox+v(m0JOk9jB5&FfvIBn2K6!W5Z)1)$hn4Ub1pxzJdc4p}{JC z(aYUMV6~Uaf>rv|{S~2shVWoxWT?sCQ?uIJ5O%dE#)oidZ%p_y7%aWi(1$=MTv=_$j^dcXCBy8?ZKf4Ow&O)o?KK!MKvap#ff}(r?n4 z*BcuE5v`DoazBvRnT2uje&#k`@=vgs!?x@weDLVy7L!4cI8obs2i;;QYqgV^3;L}C z{2&V(^BPA>%Xo|8Cyx@Rx8kkx)poF`<9+^xkx{#R_$v$Gj+WOZ_BK|!r_ItsWAQ1R zLPP_2nj{H|01EQkSzD%C&$QCzXefrkX(}Z%KW;GY{gB1Y|9tZ7Kb}7SuUBvW=gZgs zc>dx)UcUPK>nE>ww|;*9AQ@QjbT@l@TKZZV>q-i*oIH2-$O*=8zx@Q|``M@O{qFbw zf;jcf2vJV~l=rphttna`04jlQMM67S9JcHwZ`Yh+u&vQQg1VF*ZKKtm{XMY43 zxpg&z%qjooU;gFd@#Fvb>Zj)$%#UpHlR62F#0czUw28Bp zC#y?4wS85crii%%cY`!}W9^qEE2y+}D;%2TZhampZf7~ybhHi}nMYlHR@qLprhHgZ zv{snSoh63>C1`%dO1|0IUOgvR_AJ~XQ0BF05&JE3@Izq*d@~FEzIB&(ZOFYg*cEj5 z5&jGXqk;wA=BTel8oN}<+Bslf#$@GhJqK@swHCEhwO#eMU0rX#2KY=6U1P~b?F0=G zx+|<5=->`SLu)QayIQGRF7qh!ahW?>hK$tkEaJD}gyeN}U0}*d77c62uwLS`W$*B5 z)G=hujM7}wQDFSbya}5MC(BQR=}I!9&k|NCsLfU6po^j4iOM4yVhIIn%4O|^*}97} z4a7}co~yqzT5@UV#wGXdtK;QGck3#b2Rq0FW5euY|MH{t5JOC4yo2W{Rv;m7rA$py zmJ_ALE{52a__DMvy%$S73P&FXkk9C7$??cms^7Y{%uS7<`ax)$eZ&$d9p8AI*xXI- zJWk-n+L8Ymb)JZR-0NiGr%Zxe5e%+;#64b^jE@ck2O93TluT4##_T;&d2X`m{8ZIx zz`;Y4#EAEl4O9HozLKx;S#e@vlKO7b5dr0Ws`{-LrPbSX;a=l;_!shp9EBg%$G*@+ zPoHuz9%e)nLa8#)T?70*6@Z>5O-z)ptVN*6371LPlkTLre2I;cjlBakf}v7iK4czc zBS<)nwd^dFXJA!X%A^!aDYl?YQL;%_K9r5XOplbC%(PA}1#KiMvN7pJdzPg<6-jq_ z%2gQ~z+DWH36(?WkZ;mkyfI!1^9K5_i)bB&g3-dFD{1Beh#nw;(fg=x9_46cWHOCr zZ+?8F5Fou_b8dloc?}@pfq6^fzS8ta1+RghMf$lt@V?V=j{Ay%9vBL&c5M~-jZqFT z*O{$`e|ZxUuCl~%MWnw3L62hs2$qxB)BL|*XZz0{@ zaOW*4K2!bfpb#Pa8(>RV>nE8W6+}d{^EzLdk^EbS>yS(yr&Hp+O`Y{}3-Wj}z#9qgf z*uY*zHyLnd%o#F8JCLW1Rf9`~7`aU?+9b#?FD1Vh*D(ddHxHk_+_thIVV=8h9_%qK zC6^DLzS(&KpDL7jX+X0&VMpNbuZ3c%OwRJJ=v@0(^Mg6hp#E(uIPa4$SDY5G@~^eX z#a^yZ{x#i19rbNQAueCbYCuD21M!LE$Le|ls7hmIWn-H8Gi@gE3CmW*Fihw4iIhfR znFqVCsJid&{QT_ko97Q-KH2{9#gmtN4`crIOeDG!OUGA(q5CTfV>9FZ-u|Ypma4YO z{OaqM^Us~SntSZr;criT@zLjhdjFG8e)s-Izj^PYciyLC`^m39{^);v`1!9s{PLZT zzx&;1NB;2D;Sat%{_!`*={tY&9aoNh^!2e1Kh630^OK+bDOdRSLnU9?NC^4La>LKR zAS3#nfBDy&XDkZQk}6}D$6O|Ny*{5|iOa5`?tp18J~f%aR0_I%)P z=l@Vy;Y$3zG!p@_S~@`t+ft5ZjgE{R!M}}1ms@g#d|N4pN*b`PrPC$CvXhQ@VcEWf z3TFAcKn)-2p%@38yMk+G6o==*NXg_+t*Bc-QC$}Z2==gl@0FSb`jhr72YJx^BVUImwb z2nYWXkG={;-$dg7*m&|^4_~q6{AZF<(q!M!kiw~wdK?b!MZ>H`BUkWAoB+(IWo_Y< z=6@@G9SkLrWc4ck-|46R>9G-c( zI{IL;ok3bgRwv5O`igQ!OL7Uvn5@j5X8c*!DCbgahjt-fembtOLwT;@{Jr`M<2b;} z8PPg}bsXdITuUy%-UjIKGUUs92#Ag(pnqur$NP#B{l$s?(j=DSjsj9LxSkp;PYxrcOL+2> zyCmr;0DM!vwOvob=}PM`J5LKX$I5vPai*U-E>ZJI4&}+Q0k})3w{Q9?)9x}}vwX+l zTPe?-t+8Sfr%7$)$jVFQ6?-kQl5K43!IpE4alhVWx;VFmN*At&a8O$9jvS zJw>tJQV{Mc@V7$2Hw2y66jwMl^Liq^*J+D$m_h98RPr@$G(`Goe@mO5DQqeRO3?XC zXY(78-wRcXfh+|xC@pSnnI^N$ZK~xuE`5hTm8F(j%dHHNkuz3`*L{`1Ppy6sPzLx# zC*^2V&EG??s|LjSn^MDVnUUVjvHp$8Ve+)&lkW8F$oBmGorU?GW&dM;=vgrJJd}PO zh(7g)--I`QjBfoH*_NtQWb1W!^Hppcu%#M2O=kp|6tE1>=c!G=hr9u`#S}0SnDVu5 
zr&9kmCF^Z*Pnm%VqxD}Oy%EFnFF4nk5zdlz)}tk5@N6aA7F}&Vr|4~*>+Jkh-#OSf_+qJ^EAK1thW_fH}Q7&T-)Y$pZ zJjR;w_!uf-c-h+4k_-9>C)k^RzWmF7y!z|U&wqTg_v-nhCp)Rlm6`ja{VsRsV0V3c zQ$ zr;@C-Wk^sYr%nfJvlW7SDoDPN1~GPJI_#R8!QBwTv{u1SnP&i{39Jh7i}Q~z6gH(S z#4Y42MF}|?u{OoxB9-@zosE~XuMNtUEl5^2f1Y@jbz4b_h&h~f&<_g82pknG``L|) z|M~dY*7EvnckAL%&ziR-Fxnm*Zwya1hbL-5e7p(&70ZuUfkf3;rhS4er;Rn6J&9t3Ec3FRSc(77erRfp#YgYDtiusy_*)8%_~4jQ}LI!<~N| zc7u6|J|>27s^Q{z{UtoZqcxX@OV14EpYxSn9WBqFt}VUa-niJ+f^{u6?v9R+B&NnQ zGxxUV7WNjGpRKGtTUmd;8u&4m{3)LLIll2jB=sVg_$jgVw~ZG+b4Uw6dKP@}JoIpP zZF6TKlDWH{nVm~c%_gQtW0O7xvHim>tKHT2nu_LX3&3nu-c04~v66EGH;#@Jvo86Z z)s_jZnyNcDRnO1Dx(a5Z-)?M#f2qmCs!*`f^ohH5ylH2d_M(%n;0lvp7rD><#`F99 zE9BcrfK?vMyV_o`)=}v2Z%0+2vpUHBzP>6a#XwHin1F3HlbAj_YF;bqMC{B(Q zCdUfE=2+>*C?YYRT5+_$Amu7edrC9jGNE8!$@X|P+)AGs;()RtZXTE#FG^38B4A-; zUO>WCY%DCD<$*$w7${5)L1YC0qTKLS@U$S~Dgh~XfubnwDTYjWIIL>mODfx7d#s9= z0Wk9)1JZ&df9z9_z~#gGBK3&R7G6nAU3QREQ7Fs z(6Qbk59f_i&loj%MIQBY-Uo4I_fxelfRS}A6iuQVG=G7NfN`Y<&b_H6{k%vI)rngX z-k@Vb(sD$!KP!>6J64jiH_E)H*1SEH&R@ zvm~*c0s=V)$^-n7^kZ?X0^$B@L0_$)r!m-7zuH>0*jUD}bFin5MUVuauJzY1_tdQP zw)qD-gRY^l#})OC#YZNRlQXHQ*^Qa`?R!f*^GjsBKU`ejT?su|Pd*Q2o`*M{g)+~= zX(pXtC6X^$auH{bGff}yU9f|UWn@nisb|SFm0L_5Kuuyh9Fkd+^aGV@op+XyZCM;i z+P2JC>6n$=LV;RgyA|Y84AO!FJf7uWoZi3SU(LTV7my6F=opKBDSjTi-Ix1M)w#8Wd-F3BcPA!XeSIAb z-Az@krFj(v*Nbmo%e#E;%8~C5fA!gyUw-=CpFaHR;}5_3=#O8%|L&KceE8kxpPYE_ zw_pEn|MG6(?fU=o=l^`VK^(5MM4j2#tODI9;%x2LIgn5>ws(d*NIjoJi}J4&s8Pyv za+&Qy(&~)Tk7+2_VPUmEOACO9n=a;Wv%u7)*9qB> zoQ?AU3p8|$KnB?g_J#E}n!l!_)rE!Zs?Cw{rRGyeU?`Z9^1XUW$|o>9%gP3_v)bt? z(@@s5;f@=yFcD0${0^J0RH5k_=VFZpOsBH{eWLvMROJb)Fx&3Jh3XTFHMz@mr|y=q zR`@uI7-E8n2=Nd!o(80O$Ut%J>5-Bnp291Zk~1By5hZaeZ2I}SV`VoZP9#v$#lg{ z?l4)wJ#X+RSJ5SJ$z@;ZmGM%R8ef^II_E7o<|{lpRdI>}GK$q(bjnkBa;)O=MAg-) z+N)%Vunh_iGfOD$)v44~wad$9Igw$u?!1AxX2dUS%kPe}iY%?DKaOJ!!E1wE^R1Ne zFUkm53(Ku803Hpb2{i3A{OhF3#iR06T1rR6&;+Pr2#e%xEPnk4X%n4d*7uiWB37smbE> zWLbPv3ef-#qbEM1) z;MBtbh2@x6o)iAv9Ib*Pc`MMfvHqJ0Y}l^bX_B19Z$k=^d^_`>sX?u0#i~ns1Gx5eFnIeNz~xB4HJ2>ec*a z`PE1byzizsz{Q*gCgjn1hX6Mw)MA9?}UJjCH_LDqe>X)(1t2nGJ z8=T={F@`BYXE|CNVzL5ba~mSgHlSr41`1lJ*`aJ_6xvdzEeo1ZO2ytQI?k4hm90D@ z!Ln}f$E0FfM_33&cLeakZXHZ5DU;YQSq*?I2Gf8}hQH(X*|+_uNlAT55b@L0&XXhs zUOx7rIa6Mcf`I*Kl(kv);$+TiGQ4dmwP{C7`=LQ8Q<=K0hIX4AV{mw;6EC+n*^>G9 z7q9;E^!X2up8oCG>;HKD`X4V}{`L9p-(Kwf{Pf|Ijd;?(G(YZjclC5PG}RQ>+_`e+ z^x<!rLCY2Vat`WpHA8bKsB!jTPaP8*fiD?8l{b^b!Mv) zA5_W*uiMO#B|m2;gP>QZ^X}A>o&CQUn;+38mK(G6ZTR;f^V+M*K3sFm?95=^Y>~a< z5K~yrq_pz&_~cly%!Ot9R!DYD-dnxf+r8>_hep`E&>Qp7_H9deT2j8Y)M#tM*KDv& zuVhY&huO--&6ieb;I`gcZN35dGReEz47Xm!TLmoViXFk&p#z=p?|HFlQM+zCx!Q7K zwe|Q~TMj{#3@)v)x4q`ra{URe1aV|Fo?NIuw%9GSBt0X1YA5*apG>(DHO4+8>Tkfm;+`T57PNBSlAlp1gCMKt(m z(do&u^V6l5CW|l3ln5pYPfQl&PLv#*EIp1rY_j}>ofd4<@!iTJ6UE<5mmQudKQdF6 zGtGTU&&*bwpRK+MX6moaHe8>r$4Mo2Zpr)7G;EtELcaq}3$snK_@28#$^fNLt)90_ z>eBVcFs*T_!K+fEi-mQc*5*-dlRGdZSO$Y0Q$y5 zg#@M?ZDnL6EyS9FTj4K7V(AndOQ;3!(wbklX|6=Z9l*!oK zSQ|kN_)CzII9c3;y|*xZeB0^?D0-5?aBzX(pvc?x-fDkOl_ilvzEXwmu7H9A(mpQ3 zhR3-Me#hNyx$&}^+3aE61@JqzG&|Q>0i5znbg(u)SjX@bPsJ04`YPhXwJCQ?($ySy zb%B_d9g%~HY466|cxHZbYiVX{aSl9MUVgexL|B*^JvKPgZWba|)y(+eOAobTu%QUjXK3Z!B)W-BDPApus>mK9lwhf2x_dPV$HcD8{x_uoLS@MX52tNV;HkNMK!R&{gH z6xP15JfSIcJ0Uo(1J)~0E>QlJRSPm|B#|0+pZ$~4KL0veSe?%teh}|Jq@G2m{PX?& zt8RA${`Gao+^ylErkJZK?ru(ann2v!0G8U00e?sCTKfqZ*KNtA_exQkk8VD(X}>ba zDW&EEp!kgE3PYyNAJG&8C|`(`YBNt_4lXn=R;z`&6Ub*qCsr7Kfr1-y5w@7DOm{0m zi24>NJD_f45rC7LB`O%P4B%zvk}w)UPl#xw^xgXJrfa??6pNa+5KtD}0vLDN2h&g7 z`Cjee*=icZ-_O(>K`JuhG~1AaY_yHTlNCABRjlEY1&E9l6I(IU@cnfCp{e?BChNYQ zuII}mHdl*eW^?XHWsa4Fr7~PAgq3P)7~YW%X2Hy0txCvN2Ma 
z9>ENj#|uW)rp77~Bk-?aW26%PwMsAKi&#e2Qz}QQgEZoOtNubt!jSAzBPt-B*FR$2rDe zW=jnfY+w~5=cGrv#(=Iju!o?XMgHS+Q;pBz0ZB`(+iK?4GLH6N40fLl_MHv&pJU=% zhK{9VeJwO_HA3>(5PvgF`d{Wx0-uG8dL(~sw-AIwg$k?Qf{tU0&e zL_;q_>n}o~m*Ft$n_1!v@P@I>MS``{@n^~Cvqa)~QurF!PUjctE$}S;0N6*pRWj*3 zD#SHyy+tE#t9$S3DbaMc9m({U&Y4=yKhL)Rk#5h-w?MBsoTYVRim{@<6b{=uonqbS zrwa~@f3XRvQ^>ej79V8qfAF5V5JAPVX=h!aW^2@(O={Uq;!mE&1W%%SyV1SJk=@79 zN5Hzzdx;&Y=s-gzckIWJ!gv%H+5Xu;5?u_xXC!%J^_|D(BhbnN)qbgaCmsgI?k^1wu6kVY@oti@!rt1TyE^8riTi4~ z1PNa?SZc-%bqw-dm9bwTP@4mSANg#j!w9GrYSg5dpZOTK#ui$@9MQ0_%no;464_Os z1MMP*1C*)T&J!HH+;o1m(Ne~%jHGEF>D@s05mgTOKiTz>?eYnO8k$HYP1C{Z4CoOuYeOM;ftBD|(4 zmkqj%xF&a|`UGWdAcRO@W5W@J@b7rl_mee8ChAXt$@&Xmtp2RG`s7&s>B**ZQw?V) zY0lPNn5;d|C=e`6`I>bGlT|zq8up}u{SjP0F;@OPfSrYoNrSABWs7k7Z1btfrn7*8 zx#ng#izhfCkjadD`EJW;?qgicPjs(AC>xMo#oIzM6=~ax^{18^POmkcUarrjC%#a3 zlBxv_dgg|xW-y4#I4YzjNyxf=fR8>t0Qo$0$X9upe?s*zPs;BCx{>0ugf_ngpTLxb z0rco4sUD}1T^dnWMyk`^%Cx5%tCDGLn#iKHM+As#rx1-1Yoq{VM(?CY@{-p>udB^;pZ77{hW zsz1WcpCD7X(=xbZ= zZ4EH#;OY*$I^#ad+HR1Zp5B<9-I`n2zQ35czqGxyw(a+W-GKjDB*a9O0tM~Vv8B)* zPdrPcEa^_PGyTojj@28@xouo_u-~fE$)VRQ$*Km_wGjk!V zjchycEH!XE+hqE2>Ezod9*FbVIU)R_z|NcCP@7hm#SordVejNU5~nOAn=KBT2-Uj% zK{AC>5x)X+YN^{Q>#dUdww-*Oy0oKY#nbgBxwj8{EC(CAqNL3A1LP}It_Pv?#tVU* z!R+@)U--Z|2ZI)62YF}sUGVzJL|q@fyF57Hce`RE-4SnR%+nZkH^jVkiIE1Q;K)cB zSZqB8h{$01AVz5Fy(CjDcd$}{QJ_<%q%d_z-oEa4GhY_^7r?xHP}>=mC;*6+AtUiT zx68xL|Lo}8Hukn(qF#z*gLZX~p_;W?2}%;vwOFrprkKr>)i2q z+0+1~LacSiA$nx5Xi1HAVtp`-Nw_rOT7an~^B{?py5&479+68@)~s;rF@hOMe2<25;eWd{%|D~EtIa2Xe>IW<~x1nA6Ej>u$=1ghK-v%E6^fp7CfkTs29#)TGJ zZ_GDen{T>&zmek&w(tvpE10RIz1w<8Y6UHqxQ+@2B#*KG0yj7+7%eP8D6DCg)KDUw zAzX2*LTv(qn~Z{CVe{LtVvniY0o++^_q)iMipD5))NC(UI>JyVUCZ=v2}*>R2tJdM zw9u%G&B`E8{(gED`WIN8)7TjblX;(PmZPwPx|k4{$& zZJY85`GWL#d1eC9U5#ywfmy1o8N{!z0RBylQZvU>U6h(AO^g-8#qc{Nafp({4*+ec zX0O`k-^@f+db~0;3rx^>xSk%fql$e|d7K3M+LSH<$X&cw_n2La+R4 zXb@xpj|*^pbLPQU<}D_%Bs_4PzB7RC<&=`GOGDiF)@VRjW!N;Sb#h@<84c6lJ5M5i zMO_S8J83FBKmWvQ5HJ`;MMdMzINITgH>E5KB7tsUmeO=jVWgLzr7Y505yezLfJ3i> zh0bCE>>*4W&QR%0?mB-@Q)r;gZV3*x1HY?rWw36|S05N_3Xivh#+t*v&csCDqxrer#rel8 zD^L7>LP(wlqr}{aD2{JG!MPpDyo_zWh-^L$rk|{DKMm}#0sLv`p}}^1+vZT%jWA@( zqj-|S&b|FIvnikFNebYb=w*YHYK@3B(t*M_kWnmvDBI^~=Zg5LBz9P4UJgRFt#v9@ zFCW}a`~Sti5V5d^LLZyeadMnEpt8A}Li9CYtDs%s`Sls>v68`fP;no8*-F0RJr*ec zn$CqxP0a4Is^h;pz^(8p1^oT0vbJ%pnT~^tD^j3#6tVHL-1Lg9fX_UWpvmeS(BPGbMDd<|s_3 zcIYdPP*T~u&%Z*NT~|?3{%#_FMQJis#B7nE^*n%o&18j|R*E@8+%!=hP_UroBD1A1 z1(6-rUq(k$>@xPZ*JkGd%d4DgVkp>AwNMp2#U->w!B%UwkS}defzUTwdvpseY$XFb zH92={DN}R*GfVB4XnX@SH9X6V675~-@G4Qtx3HJ_z+S(p#xv~Ll^$*_2~@0*5MTxaJo8xTQAVPqM$NZ#q%oc4GxzH8W=mt2 zel$Sg()@+Q@3ov`74Ll8H9S}M8ZO#Ci>2nvTw%5p+;6))-+qOWUIw_h9T59T6?(oN z!})1vgQV5=+iy_xBlaKvYPMP^i+YWP+3P0(lKm(n5(|rDvopa>U6SR96^f5&s*%jL zz{RjL)Ii~sd2p1qxP1mC^1{?5JZLeJ#5U?6t;4@IZ_9lorOJJW?yZPB9n_UsT`x+u zEHzaF|H5Tb_*I7z-)O^Ex;UOdPsVp`pa(6OM2|bqygkAEkxVgD z5oHSl#Jx39!zE>N*dHo~(4k;n{LVx@f*tJ!x$tT;KA4xXyombbs9cfjo-!H8?Yl{* zSPuqy>Cqdhk?Y37xWWMCVvJtYnJFxD;1MCL+-jFls?078)P=h%V|_Kr!TQu-WqPP8 zIama=xh&Ef9(1OhY}RS0n9?`zE#H^J2C*kjW{7UPAaz8?$LQdfnBr!1%S_{uZfnsZ zv4uCGQ5(&cE`SRXB*RnnHPg)kQay`#!BXguKc3sBh;2g$om^H1O4@ZzPez4eRQBcHrT)=APS5?u&}Q&I@%l=X$bo$Xg8y#qk~Q)sYi=kmY0jFT8S%IeI=l$XtepJsHDvwR zcVyQ=ZsocatGUeb#oG*!Uoxq&s)`uv|ODPyVdq7P) zn%B9-se#jXmTv^OUF5JnN^9DtcK1&X@>V3_*|!ItY(aqE0-b9b(=vC=uYS!tPxIIB z^>UUAdfKC&+K9Ub!cC6VCdaB{z6utbLctPL)hf}=GNr4Dk(MN><>+Jgn+~Inac434 zOQ%NKf;4M7a9&*oxUkfSMKtvJl}^f`$IzZj?YYS3eUK{5EE2##1wlhDDv-}vjwNMh z_!rjTIc40q^}LKhk+s!Mzw$iy)G;Ly)LcPaLoVpM`wB5oQEP1?fC2$4T9PX~zbW7O 
z7JSr<5?@_xy|&bPZJMa4s$59$UhUcWh6}c9{(jp9fT>BG&y4xhVPq16F@BK1NprC42kSO$gY3pa|z8cL;Bj8#m>lL_IZJxvQc zXO1#5ms%mm;Ej8V4zDzMn-5RdeKT7A=jD#mYm@?56j*m@uKqH6-j^G1Fo(6=c!eJU zEH+d13Y?xV zonL1-m(PD_oz5fT8m17kaHBm-td_eqUctaG1Oa7pi7g8M6?!e}zj8drQpj~UaZ0e6s!5xj6vbNqSOpW?}IPF_E^;>s+_d0fdYn`Ha%r9 zyFfV|$-F&T!;eEZhk7iIYZzas-^Z9$F=JL>(_NeyE=&&<#PBnBUrP<s17xxu35rFe4~ZFo^-KUNml!eI zK1pqymPZe7V0FsX%v$8trh;I5DW&FkUzyD~GfplOEW~++?>N-J6)kJmhld-OoWeE}8E%G=VQq;8>#ADos)@VWQog?E zaA&Z;Jv7h}bM+)VeYnD?auahD8y${~k3>ej0k0=EF%DB_=H@mQ?`7tTy&ynS=T8&L-0X9xAIIiwH$X z{(>E}E~+(N^qP_sn|(yCDx5qn6}gTL%%~OtoAnflo;5x-#b=K-P3`cqYHg>|o;5rx z?W#gQn3JrG24h;Hh#0F5GNUn|D$sp4w3@IsXLWQ}?ahvY-fz)jV{;r<2U41~Q*7O2 zQ?lA7-gDSo^|JMyO$9s3S2r2iqny|Fi?V~bptCgxIjitSJtH6`dtTz5G{4#7KzP|R z5*#1)j|?zv3jc=OwLzEgZ)&_QK2{s{R$})R%Ye{GYSH(azMpSB zy3~GZwd-uK`@BK2pcVeEb6~CejKAmXY8RH{BRHdw&vIku+0~9S0OBUMbt=q+bVO?gfYl)jK^QG+BKmsjkB`?Dz*L`-HNR%g6e`~1t3 zOTRS>=+a?Kb{Pl-OVQX;ABntxf+uSaPt~z+=oA8(Cj=ssDKFQPk8z9!v^B5i8VLA0 z!E_ZiZ{83VLa~$LUfa>R_G9;3k5ATq>nZw*`7BmrP+OjBymG(kDp^$kk;_&`)4xi- zHyO+nwB$pN1uQh+gwXO+fQhcU~Fie+Ws>O%i5riAdu0OMaNIkL--;$9nU zek;^XZq{hGI1hy;-j8D2Z;SSYQ`Oqd^O7$;+ zjl?!0gk?-gQ=^UhxYYR_6mB653o9XhB_-nksCfb?oAM@=YWUYkaC5RagAn#Y==6>S zL}7r8uK;h=&IDdBRs&z-Uo+k!usKq~pb+*aNYL2R^eikXDpmW@6rTZv3nTOK&YvhG zC3l#Fd?8%%Uu{#M9xnl#V+=8sY~TbNDga8ULa%@#GbU#UeQN?MR|(HU!B*H7`Aqv; ze?~3^ZZbAYQbR@9s}R5N>4vK)HJA_o#;NxA777Iq5g}3m|AspXV?70_0S1|iaA7fb zyyYv-xC>-_$>HAusSgnE=^^J+`ImT4ei1sC4NGFlcKqI^m+8pM*``EhqXr6i4TP1M z(ThtO!5kZ$-=kJAgz_{96yin;1DgjID)*Gw1}pS3U}}l<%ft|rTD{zGYrT~NMsa$m zg0loy5b~mYd+G$e_0hftH9w30nV1)o$)=0LL)AbE(F0}X1Eem&Dbb4FG`MXtPMXWc zzkMa^3Q|%ndvl+Ep=s=463RVTE>e1+QXJj=*tsiLFJ|96r#3jyNHrVKQ&zM`+#N*g6Al&_YGjJa^1+q|Tb2fz*F0UJvHsxXU|?dvKhe8B z*%!Gx9G)EtO}e8qzQnz$$#9brG(lTdedVX%dDtL; ziHm+K#bcxGa$^&bj~8smAFK_P+a5scQ%jM&dTM;{j5S(zFl${BMa1tSq$vg_T z50~<(OFa`p94pf@Y$a7z^ZHsL8q0BX_?j&88xk|Hd~|#AK*W0^K6)D;RLqT0Svr!a z&VBhwpHxS6WT?m39KuBo?|X@nr4SkNjh6vm?~GS&k5xPvE88AHazb8cw|tn(VI>BL zM#{D)Dj_=^^({31wPY>WM?1L`llP(Gcf1tQs^K2Ro8v_w+)s4pIoSMxFt0)Qm+MR+ z0q&2crj^W85xy^eI%t=kGT>kUc#^FN8HlFZK&6z{_U2eE?(S^5W_j9OvEi!P^3UCY=q?cE9VSAA1TCg}B%OnUlVy3~D1-Ewy@GtjaE|fJdq;+w z2)Op;!NQcCrD#7a58AUKHyxzKzXFUjP5pw z_R$(H%Ok#;npR0i=u7W z({7*Je&0}N+>^dLy1g*9wLHGPGO-(&d*r|SaBbz0fA!&N>ajnw>woZgZD()&vEt$0 z`orDz?Y+RxUSOL7IGt!|Nk{gcgm<}=D^X^SU`Ek#`<`bEW6+1xhaE0{%Peo$L79BS zHXmS*GT!0NHjKli84l7f{nnq%EamA2gO$g*m1>;LAt;g@yd~?6)D8gCf zZO0iXTPmo!%`nuRyV7!aq3(;t`Y)H74sjjFHIMRloDnH4Q6AF&Ww;pf1uR1}&lN1p zebC*+XR+!R=aqz5Y9a^9aS4WMIRORF*B!lIdt|=u_(FY-@h`8Lm$BBx7Fr0G*v7-0 z4P^8fl7+b2k1uz^w#VjMnMFD@UHA2D-68RwwZYWK#w!lNl~xl*Pz$IM3r;AGOFXp+ z?ap&#ESoyU7zXave}7MGU~+Up`9i)HKL!7CH*`9NZF7imykVG#Gr(fUDP%ZUY{%?< zhHx-Qe7@nxQuA@9s#%9YK$$kv#VgMB?3dL1S;S1;;1)@e{x%x2SB-gXBFrYW?Yb`4 zxWQ@PTBw})jYBP^3I!n|LhE=~O!;`E1x4fmI1l60&r6h@Is+di;S#lR4=QM&%9b0#p&C3844Co_6!4$M9YtwfQbV#0R%7r|y zWNew#FH%yakWN6Er#w6?C2MJ6_ml!Alqp$flh2`G0n97%d7u)mU+cZIDhtAHTdxPo zjtp0aUGR8K&{r4m)<#B}0AhKqzb-IbA9OVYy$VmQe^{ofVe`;nlfSpl-`jw5Y<-~F z-`^B)Q_1Y)BRxb_bHo!K^+v|VViObbiP_}Ty{$RY!&diJNe}ZsS`9p23qD>;?)p=^ z>sycgn>%Zn2mXzR>zm-g+UDcH&TdE%ez+T^L;Y|!_INM8%cXh7j7^l9WmQC0?g^O! z5z)4WKqrIXFM}y)V``&IhflT6OnVfEQubdT!&7X75*(NwI!Llk1y~>Z%g*dui?SU6 zaFQ!@$&@lR8;E2Hkvgv&rq=9Alh_Aq$o6{nwJk5e*q@p{muG4a* zFRXfYy_;^Y#AH!Awkoj|ARP1 zi4zP-0bUypG2GN}ANo{w9k%8ypgP-+uXN=a|I%;0+jt0D7dn(}5mbqp9+K!w%}=JP zj4sZ4s*mwleny4NCc~IA7St%CPy4L8?+}x960PUV%MI0XJNHrXEA-8`V8F_Ls{w^a_?Nz|47ci%KC$o0N4E$l z8U7Wok?bk3%nItd)LrR^(pex#O$@pkcgz7m&Hy&WGgczQwwk@{P~jN9VzFSMqhWIy z>KVI3OINJROy+7)8b{X7c=f|cCX)EAH? zwJ`N(#P23OWp^HbuDnrvYk?|X>;LkQ?MWF~Qo83#HLn&5qqPc+W`^@OTm=-6H@#9! 
[base85-encoded GIT binary patch data — no human-readable content]
zoXX;#I{`y7rEdYUt-fekP{>=|JeDF#TP7aKt$N+QYsmQpe+9w1E)s2N%{>%?%TB&l zzSg6@3;4qFnIzH36k<*)!f3)X6}9-ePkZZ!PZzhp9BJHtIDYtea`)+6zO7U24h21S z12HBw+uhMF(;lhUpI$Ji>}}6C@YkRmkP7TVXH^KvpCtdk=6Ti8SiZnYnBK-&rB_uX zxBXnBA%19WV-U{K-sYUsemS*r^mwdodA0b?_IBq1F2f{?;@03!i2ve2vz1yKLw91L zJ~hQKMg6a|cMjB|_w+L}()*$-i%nGpD;svB8++wn^JYr|r%5o4xVkzCux=zBL;q6J zvKLIrwd}B*KWR1d40P0@dDKTJ2a+TRH0^8D&6?8Bzf=O0b-`y(!~WVFcqv`Sknp% zL+(O+CF8C=R)3|~RL6WZe{_r|fL?duuVuWw#grgbH-Jjl)bVi6Z9lkD*v<4j2{Y?B zKlq!5RKtGL7w3I!$brJyF>h1Wdv~D}LpwTRX)-h0ZG3w22Y;0krop}h$12fP%o-7^ zLcN?hh&e@hbb#bSlTF{!{p&|Du#2Pz^vux)#X_)PV)-4`d;N}#@&nU{sPc(~%ReDr zNgWfmoSaX+>kOl|Pa<=jDFm!l8#J|TH1>dO1HrcqSNG-ckfaP)=W{#A0>nHVj;U=dUgU_3 zYq=gupVp})FAnrH!tUU2^{6zk27)EOOD1QF@ND6ij}X0T{ll~UcK$_!-hH4jnvgSS zmVP&2!=iYl4++{j3Jm_jSA%-^Yw`uY8+;C%+B4qXan~9_COPSsv%FhT!vejpvox zLw$w6G6gNe2?BO)Xx6$v%Lp7JeT|iIey7gNg=p*$IUykwT z$g=4pRsFAi&-mkg^~c-hPmhYgdO@P`b?rn6EMaS%2mIBRAEg~w_xjfQE2+^oLS#+p z2d3)k^-B2rQur$f-d25l&ubAE0uIe^2bEY6;OJiV6&~Dd4>OmvHQ+|xE1Nfn zGs}D9tRY~=UyhmoJI<+{(Xp8(8w$>1GuQo2J~jW<*)-HgTBAC6RPL62YU+s|bQ`BP46u0i%Bu@{vbhY$acsSfjmmEUe@EjE z$zc1!hGvdL3n4+A$AKHIsxWcLI}y4P)#OZ`y}h2j6&Ba=ps?{5uA{cmGi~P^yG#FE zm?Y6e71YY!bJ@O=ubhPG5w=8gNrU-TY;znLP*S+8QS~x#LE(I0x5}F%hrcr%jIcRI z;2W&5J&a+g8Seaq{;v)YcFTlT>ljRQ#(+_5w>-) z$+73@q0#W?2p8yCSR7^PP?v~*@oYLL-@80ND~;*RVLIg+@|l8CJ}KGC1>v}8;* zXS;)sHobP&2^u1di>j@!Qu8DX2^GQBr(*FFWwr?}W%W%f7R4+~MJ*9tlkEmvpQq;4 z>Om>iPPX@H-N5HN{4FQf4&!1E*!dkJoX>xUv-AsUSu5hmKc}o0pR+KLT#G5bqld%Y z`uOsoEtL%6ubk83u|pN=oa0&T+)sGs8F_!yif9S28039Deqg*gFDp4(vgA*+YSPw$ z;Jvhr-oL#+dU!l}(kT253$5Oa8PBc}&Pw#cAQP<{BLvB**pB zR{zq6$mWP2i7u8G%g)T3^I`Pch08W$ z1m&F}g9d9m(^dA^OX^v1bG*7fT3;Qml&`-1s6J*FqL*(!DsO7_bfA?E+?Oen#4^Y% z(i%Mu;-{9p-@dV9cYgXRjn3cKJ22~_3ckbNNFUf8Mu>A_d-rM!sVz!_Wf^_}!6XT* z9N3Pk*926&HDkeFAiKBX*v%U!ybBQ1bHm*rS;wJJxC6Vw-|~*r7pvLt;4)F*_`k#9 zHE32pjcV>1*frEHfuv*k9R!tFgJ!{BNwKXXijr{1DU`#18jyzoX^4LX@4tLhoBEKLS;~G(>;>xi^>O%cPVJdYzg5R z3vJ+(b_#iu&x~(7>@IyvS5E?!J5y29G<;#Z$z)_GX7*?lh7Iy23GQr-5xzVr1VeNh@jnM%_=^QdJm>yDI&IpNl;Z>%0Bihr|2Fps)i|Boc&uPa>tw(^e_xJ{keB?aK~!B)IuG<{r9)+zuz|=U)O(n7=nS?l4!~P zB1*mbgHDJ}XfSnt6!!VJ0o?;*9I4;M@dd=eUut;qd28DMEseBXIzNX~X$$_otmSuq z!z+hvPD&~Y&571_v6n!G0aRqBqM3@@DuG$X^WEY3&Hm}tfyd3>*_~R|O$*)55&?7a zditvIeDdO<7T?s7q z`xOgL2U_hf+*cT|AxiU(F0C0JiEt1HRvEYifAN30Vr>adt!*C1gdXlQDC``PLh$P> zAKW;I$0|EtezAtyXlM%xH$dB0l?ROzZTCDQW(EZaT1PlJ(%Hs&_t}K$rk$0{oL(F- z-eSL<-&Hk7@!zTWRVAzwnVd^c#$yL}@W_^}R60YGWZwS^DI2M|>b<=3$(dImuihpG zmA3jtW<}Bsqu1eeXUJlw(+PP8*MfItr*#i2-Ar-Foj;}v>53AE#YJE zyLhr7T|ZSLZ|%>I9o*kprzwPg@?`qz0r#+2{Tv7nE$_6R5^;ptL#bGH2fq{(MBdN5 z5Chl>zKe?%!@2#iy7_!5w$t4?vPN49E8L?aL-<>YqFB~AiH%K`xJD&sF2OVRCleXf zSSi>g5Q85|UgDqIZ|+{T{xjp>zpwxO+vZOPc2mjA*-RO2-!TGzs`aG_L zFQnn3-7fjmJC$i9I~lWCzU|$IvzuR(Np*Vr@kA(mf23Z@8$kvvM{TD(0nw!SP)w02 z6p!#5AeXSi6R+kf7_sC38h_JM0?(9AFM}})5R(G!uJ2?>GLvU}PwB)OM0Ew48jY%+ zNaZc2+a&)TTkx0Cr1aM^{sP%`jLeb@lgVM@Ihzg>%UxGVW_9ClWm)IzWKAzyIAko1 zLe+F7;#qOUKE~~{{^}X}>8WFwEG0eXFc0xoIwT|!Lr%*h&gTuR$dT7$Esi?nOA@XC zWo89)qio#;kq0_QY$;}Z@GPoTRQz4!v@mxpxZoYXN6yfYfVavABnAA=M2KuN=A^$NvM|lY^Y7$;@FPPKPfmWMy@RXtU?ZU6QL&l%eMUP zuCWqKz_rS!bNb@ZC5t3fvy9Ybms-l7BI?fdH|;HP6bwARXPz`0HJ8v1WA+D~k_z%q$J z-qv7??t=E`oa2h-qN9p=GJ3qk2%LsmUhQ&np4fa>^M3s|Zw=#N)&S!UwL={f{;#ep z{g3CRa#7)LmJe2gET`J~=g3fx4w2wXjYzsJ=Byx`cqq}#m-U;*K(q$mfym&mtywU% zWrsuvOQk|E9+&Uw9{Kz4V=?+fYQDqq{At7#oPmsR-Tpi{ICJAjGzc!l6#gRJQYNO; zvEo*5it{SjY#REr(ynAVUk_^v8I4>q|& z#es*O|K+b2tDjCalozMZm8v8_#8%wFUt~}%*}KHdqH$`<^z~S)58S^T-NAj$GoQX*YEsJsF~<(5D#aMp!CDw@00?`G9&Q`YW~2}>-?fGuLnj}OsG(87Xjq5mK2 zXDWI7bN7q|ZQG(wiDJ2~K9#@N)w|U8N@+-S%)<+mC%|OHpH@yGtBQCrmBO=a5H%t( 
zH}5@io8$bt*Yuw^a_2|3ws(Xnh^LHCF;~jA(%kUGDD*UaRK6ZIaH0dH8SuP_^%tA! znT94WeXinMr5s=AJTatow)UW{F{ zX2DLwWwlqX2QK4wvdqc7T-+R#W*Zq|_M+n_A0>ra>44{Jq)@Ag&8sUDS%2v=ZH zvBlLBLl8XVee8fWShJ>mBr@RC_BVOE>IkhcF||3>9l9%dB_k3jT;LYNs*Z1oNJJ)y zi{%SLbT=#t8Nu21Monb(C-zKt_KP%*=WAs(^p#qgxP5`P{iBTUh25zvR@#PAIE2EJ zNl8upBVeig9FEpg2Fr;17v6u-AS01j%#6!%qKovduLs%LM@Blgnf4l~Fp5PX!&MKtA5Of3%5%1? z!5x*KTix~oyTpki`#2iCtzJWVS?w=Rj8jF2?epS3=v#@qJy`*UvFRDBS^a`HG?0B9 zQ>z3@99D0QzxG}mPn}VNAB4987Q;f5+t2>+cXV(XLEqaMj1u-v@E1J*(<|5tPnG)I zWG)FqH3Yuj4SneQNxfd1@wR()p2UKuGjub_0#JjdwFDNj9ArDxQza3os?RmxoXcNM zG@1X7_-`)lURYQT`YoI)KPomr7CDlJcW~rL`4|PhQm2|WEq8=(>5Dv7*gw6~@4f}@bM=0Yk=CLy zeV$^~aur!7d95t?tNrK7YF2sgnR7l-nA$1dC$%9`CSuhDyMkbio6p;u{oC7vJ0)d3 zs4ygX(Zxt--8$JYSu&}-NaNIGqblLhMr>j_&C}8w%E?PemcF8$!sr;iv%Jh!>x{zq z&IWWraA!nELgS)dDohOc7XFr3-i;sSuwy5{p$U9r$Q3~+Ri>(|3};Cd59YKk0^B=i zZV0v~&XGFI-%W%UD_3rgy`BrRj&l8sGDmB{?(Mu;(5Rog3PglY?Fmqx0t z0&&JC%_c6|CWjNQ(zNRMk_x*fJ(hf)9sHWe=LXG0LOam8_-mODI zYAtwY$V&$deAu?(7xa8vt3SDx^WF${(=Ou*|5p_87)l1vP{{@g$4a9+z|3a9_hSLp z)3|i%H8nn*b3XI8vpWrH=+v`9gBgyUDJ`UUDB7truefJXbK>Qk^#K#buO2Hfx>=u+ z%~T1c3293PPt#````(FvI&NMM4Nd?Lx~%PwYIVyl!U{6Ha&;uSPEz;_M~&=D z{8htC9{?+ScxG)KM+=oVj0*20@dl->*&-rZWtjJS*K&c4!uwY@2iLbpw|DAabo!w2 zcz*YEajzlv0DK`AOEPE^F*a>&V}HW;{TVn9phcH;?Hk-VW~c7S=**B|knpanC!qLte1`S-T2&!D@Icjafs14|3#}n*w#tEzD=rL{(Ita@YOgp?^T`zyPH5O)@F4YA55mM^X{7X0NZyW7bJ4?j1QF9zD6y#VVF=_SHF!x2Vbp}RCICSKYp86Yfk82Sa|m0kj%t)OZc ze`TPykttI%c4k$~Iqji+TYJ`cyXF5Aw z3*Y66F$QcBsCWUZpzl8=z?TXWD731z|1O`8)*AFO#mbturStI|BS9cG9mpZ}=y9?S9&lRQ^f+u7zL($;t)>Y{n>5f%+yMrqpXLB%O zx2~=I=w$gOf~+fQKzeBF8%S1FMUzw61e5$tIn^C9lL0w1Jeub_SQ*qSSx7G|&DMCl}4BF*Ijw z6PZHNpH{cA`ODgu%5}n(iyy1Ake#fAebF=cN(v^1y}W-`OT)eE+fQ3!!8b?OcPF=Z zC)X;jl&2s}myAOoXp6jK1Iz3zp>t`WB}l*YY%AvmlQ?nd(B)g zp8K+W(Wr`^THjFCyVhDt;qBGeH!biN9vdeYX4vMgBeRMVz0{HQT>~!BTPmd3c4g4F zcio$cgU)H0TMjQ7Wx5A{F`h!jixcCnNOx+V(Rlm{{FJ7zlipl*qpH2!J=mR@_f{$( z+Wvg7`p2i$PoK3Sl@>CP8&x?}6*YK%KKYgTe@+Y2_PGYUNuS+3;jWGfYZ#bX_~tPjCZ>|wl4$+G{XCtmv1=$wII z=UOQA^aB_rq>J_=0H|0yuIF-yL?obY+S3U~A0seyEaBIHF zOS1lRq@GMT+^&N-fdc5th)Fbl;he(K65`13mRTKY{&X0i5> z4m|`+5y=bBMDtw#YTr3pv3gYcyV76~;M$I}+1ZK2`C&FY4N~i_%o4}yemVJ~LCj9y zMg8U^p#;eWXSi0Jl z_6BDQi<4`uu2ve<1^>4Bcc45ay=qzM<5i5@mAMoC68vra{mmx{`ePfAaAjk_gI>*c za`Aa_c-tm-J||iN9b(GP%B^5p8q%#cLfweE6gZBmBK2oh(~Rw}XX~GiuYT6*ek)<6 zp{=qLZROhG^{bqbcacYCj^-FcC$H~n(UTLef#MOkEaZhdO_MizD0Sp_j{kaoVl?tX znc?V-mfys%K;B5SuHu_-gH}5L?ttB;8#A@Hoj_&KWF;dl=9>YQ3C`ebcaS<9HM!7# z(Q_l+HHGy8N6p3&Tn0Uv|Jv699dY(LO8mDi6Ni?tJh?75wr&PXKqu)VEw&|_gzw{V z%ej79T|X%UZLD3slv+z~cmnWjC3@M(+l>7W=7O+0_=_&W3y9q%`O?5+s_mRo#lnK< zQjww_WJb9vv$Rt@UuLN$aJ#5ouwbXP1~^-tW>X6+@O9u7EKa`#SY>@USZJhRMsV_| zWfCxyr_@}u%YX2u+oL2)#C(*)RYqP~Y9zfmF(x=J^@P{xz8UVGL97=1#lxi*Sa!HZ z;duJXfyn9#qR_A+ze4TG+MdQD@!r%lIKx1Db?ytwG_&pccCcTQG3In&avtY@UE zpH#TSBMZ`1#4xe`lBVSA3ZAq;1F>};j%=s6W{X@ib7uLX8-@-uFx9nKWzUEY3OA0J zDW%t3QnpmH;AAkqiw{`%n3;p$3Wg5b-cCAwIkL?}s$4HkumWPc*v7$O4SE`fk*@f1 zYy&jXyluI#EYv9cEkZj<3-0M*h-~m3$QY4vMc!oub$56#2)_KZxl(Ghh-fX*o>;JSS|B(k`ued0!Q!}V29HQ+5bgKZ zl~Rt~nMxy`uUkZRe=5VJm>=NV+rn8VYt?}Jz0^{D`o#GArmuHj4!mIcbqLB0piMGs z2k)3;=KQNhTi+Y_D>|+}bevcF9PflNw;5SFa?IyP^Y|yrO-=?i+#s4?$&jk#eb_&_ z`FODX>4V1APX{-@96jz`y6Iv(tnMzTPJ_?yr!QYm7Vs4iHySP$AQQ4=vz=2-u%4)8 zl^Ojf9LK9|{H;_*oup;K{_n0Wn#_b~5@sxH_MD$kv-p(H9w_Hl9ucM5VgQzhDkGH_OgZBvEk|l7mmDW zKPV3b`?}z=WVwM}rmtxwB#x&|oVPofJYl;IDU+3=PvE^{Hbi)BmY{pFRj_KR=T$9r zbgE@do=qdC!vBMn|E`ANmDzEU6vMGP9o;OuWXsxl8S9VtMBL2CSQ!R~hhhgVzQZIY9*y@zZn zsiP2(3g7s)eDcTD@A7-5zWI%ivBK8iBh$4e?UZHBnL#@4R$D)!*zK*a&}?SXkVIxRb6yx) z27~<8ocTL@)##sG2%g@&t@|mT)K?j&8as`@&Y&Q(Lh=|*SZo!tFBq-6Ek40AKJ>=m 
ztkAK=AJT6p_}e;4b&Ex*Nei`dE4EGFlEy|mbuNl(XP%`olz+ATbfa;-cXzXYPlsVx zXgEwUX_aW?Wf~lcD%lSz1<2%_{$HF{_0SNU#T9iJR}0_GELt&~Td74vK?7p95RFD01 zdz(0oI$P|nwxTm*r(=31dk*(tzF|EYk<0%*c@Dm(+37ShXB6H+YBWG*T`{X#G`x}z zFQ@`Q=YV~hROW=$NL=;fac}WGg;q+GB3BCJ;u8U)4#Vqgd{0A+WR4AXnn1AVWj+Cg zn`ze)Gjd}ct7S@)Lk0eF{&tOxzd~fp3~enq;;)i%F+{^AC`^%^<+9IZ4-6%vFX}w} z7OvHjXay26uS_cJp8>w!K`IM>T?AwVL65~$XBH@v5OP!pBf?yK*K_Vo55MSaHw2p4CQ@! zR2NDoXfY~RWU|t`csNRZ$wH`(H3=>q@_%uMvESFN{eA*O8wXDCHSlD z*e?XhyVi#q;4gAL_%7|p3(hc<=op5%-|kqYuSg1LS1U;r()nLOQf&pJTvw-($d7C3 z7&?t}ExuKLV)5J2B-~+fztq8zcT{Gro{R;myJKNj2-kqNAwj#yxFGM4qd;(aV^$}S z+-(Zj!N)d+YefcIk5exr{@Y^iSWihhQkJ9(qCea%Qifz{-v`HD!qP??pdop!+~>&X z(5d*=tVR0{oe7q(?}OU{Hz7H+rCt`8%N22)bpw8qdO=2$XZ6Ao>|TDnzWRJ?2tK?~ zI#%hQi$|rS40|3`Ft+FfnkNeu$@dV<4M7X?+PlO*WwjvC83-QuYwW&MS7&?8@_*?7 zSt}3`7nTGG8B^sX*p+f<3t20ZI&3s@IT}4Hr_ttm7`gU;jlTneZ7t}}zApTB;JdqX z1i=m80&zVq2v0qqW}DTkS%kB;`b1==raIYyH zXX^McSBazua|ioc*+;9dv{6kbRpW0%If7thM~d^W`tQsXNooXkpkDi}m29j(!9hMJ zKI_t552th%{&w6}S8UL4+l9XXJ#>ky^o#VAjlfJ`W*$=wUF<`2F zdoo>CPQ8$WsX3_%K^F<8KO2J9l0vtKN}0%q{-og;tJp9)5KvZ1W1^FCK~lEhW6haq zZh&ZTts~my&{fIxqgJ?+WJL*BCw#k*^IQHeXB%22fBL$_TL)6(sCpX~x9`OBmOBS8 zhLTB`ruy9KY#^F`rrdTtJ*iwx=~%Otf|8@?sDZiUhgfU{XCVY+mh_W}ilgHe>mNl^ zNn1*7)*2;5{k#yia#p=2Al7dmU=u|*mV0w0*YoC9LzO92oT3%SvSzWaZoKa3kHvQK zbi@+dIH?t>c(c*QdGf8y?Nu=fk)ugUSL5eKUoGIfT(JrHCQPvQ?~ZPih$R+Uf#BOi zvCvM(5(Kw5;dD(j4|8K^5A;BsONd^D6 zq-LANzmgy=6WX3>@RtbDEc@8%^Cx59rhANeyziII?;7QaCnBtUgMMm_9h{O!0DD%k zEq5DWPU>lEea{E6^q()UKdv`FpRN9NB=Ei4TS*GWl-3T>mVw23rzJb}u9-Z3^`+JI zI^1dr`OQv_teKpb#reTs5TvC8bY8an7md-H!E*ZJD6KNZd4K#CpAesIqS&q|u={ZO zbbs=wad-TH%_cmRKTA}%^Qo@1qfemn0>MjWlmyeTo_wc)p%GiWS4Y@oTY6QmURy!< z$nhf+J3B7&rK(r|)zC?Fa^bHH0Fm1sX?Q}9))=dpuS8qq?MgQF726nzYnNy3#Y&pJ z--!yDeq;GieR=rHXdr~16hn%Xg}dK@#!0sS2Y zNu5`-5?SgEgXxkuODJ3}c~jI!ghv8Hg?CvWA&zZ$V`957$)hd6*eO3fM=EMe*!O$< zz5eoNk9pN#9Y1O-xc`P-ho||M^o(I$pzLM%WRHn0ynH%dKOCMt9c%GuXXFNYWbWo| z!cTU{QUX{yf9LZ>5_L{%4}8pQIZxwnMTDhZoFIDsN){HFOOlCFv&^BnEYZb@fQ3{& zNFg6qCx5@#d|O>QElQWD>WSsdXbEW#M#MSDlD?66!KF#ce~gD%Q5!g)7_n8JlNNbF zy144{ZtMzyiH5}#A8DsBuFhkR506!oTvOVUD)`$F>=hFxY_)D|&&83udL|UJ^mEWL zdM5f;XY?IL%doGN zUmSY&2!}w%S9A#G3QtJ~Yi&ENk>rtzBv@#zlP(n`7a-3w(Jmfu>OIZ?jCklhz5^DYQ*Y z8MYZj=04fSgKjP4MO)z6v<*w32>XLer}s8WZ@&8Fc>D9w^{-l`=v+C^5>hEZEInqj zCY9$^F*%vf7(7@?KWy#j#l#8xsU@Ux7aOPPeP>t&uF~cnWyI{H}7PCrsD<*wm~HZR0OK zb=?^2Wv5g6EUm3!pub8}eP`sfO`cl*Xc&{6ZR!u@W3?G1Ev`ux{sl{-1c-s-AaapT z-Ax*;)hKMto&8@>7`%pp=Ib5dO^i5Rv*x+K7}OS8HB?m`M}0X|xllH3m)>_kr+Upv zf~M4|VvHe~wH5xxHFk0o{%?Woyv{-pQy{Jj!D8VrV|y1DJ;jUhwM|@aoeMn%Xx{l? zSvT&F zcWHnuepDs5)ltXnG-KG(IvZFFmKlhPf`*7L%mrn5%i78nvZJRviFCUWw3kKDf<(7F zGphYT>HlE^_$y_K+e#EJc<8ru(%_Co0V0#J5F~X+3Q(`VnD> z)>YtoTQ(_Z7Jjgbl{z; zL7aah{BwRwGuTv!_@cb=8GoBGGmTmDK3U*ku_nnfSn`-tMdwQsW>k4aTv|t;!C&o2 z$>_{dZf+2*l?p$ft2Lt9M$f=C! zq){RqptM+34A+YyRzyObF36&*sxHESgl@)imk zpg~y9Ml4L9cy3Dsk}v8BFQ-EfP)(l>S8AZ4HX55>_SZk}UH)?VaG<`CmIXb%u~P+d7)6!+63yX;T}Xcgoosoo#^=MHMaVaBcP9_`z#tKD52+gWH;wsUY{$ z_SnT*qx-Eb-OG{3&g}Av4-EQHfxP&(tWxB*7-yIFXIC1dSC!$9-WRN%&|N6?xo*HG zNtIlAmkq)B(`+`_SFjCb0NGeBLQTLub=WmaidT6$;y5PAs_MDM_``Cl=nu zUmsj0`4%o>K=j2znq(nQBkOhM41u%4P0$m`LPD(^+&3ec$;myIoLW?~wviV3)k2^2 zz~pnGZ3(8szQto3E6cNXVpVh9*-2bj$g9CQSykb;tA{bvMtS}-+b(RCxT$Bk@#gjKW!hjzu?F(zp?Miv$Q62 zdV^5Yq;qQMy24+9old1x?UX)wbwbXdA3H|Ni+$wAU;ok}3p#T6$e!wrbi-cd$#9=K zU_xp?ZSWVHhkGlZC2n#(lSIb3rPj$y1X=Hh_nw>gIfvaoUi$bsekS>KXqD1hc2_%@ zB(@Fst9oG#?MKy6FX`-SDxXmv@OW^0x3|6euoBg*#d&X)bG4_! 
z1gm!)qalwbPEEazdMSAeKA^+*2q0U(vg8yE!wJ(%fAU7O-2sl)@P)rf^t9F$a@TU5 zpz~y_YbNJi`;AM_c)0nb?0K)bfEBIDmN97+*z%|gm|lEd-9s(JAWHez+%o?i+Y0G+ zUygvj`><)i1A}pT2%fZs#S8NGf4yK6^I!gmVqP*4HRN%7Oi^Z?#L%4olA+B$XC}hS z78*CVq5q-oZt3;%S)YBh$D9kqmggTmA~M1*BnJQJ0nX@aZ8gWS7#Rff`BbS zb$kvZ?wqfXmBAXm-9jh8ZPdn2ZrMy%NkwBd8dYujrs7F}m?ekY^`ekBun02#RTfrx z)^>+sup5XC#}f*>C|C;=raRW}+lSvS?;C&9Te32@wMwJit4y$KFS?P0XKGc*c$a{v zWo!9XU45@<*$R2aUr@k4at^TdCD>w&SrH8owuGjLwS;{VeTgQ-_?J?$FHEUp^7;y$ z>4%nX7|bgW5DvEQD!z8l5F=yQv-0fbcvG#NWXD|R@d;VCsEh}nr|;aq1;CY|$jG;{ zAMcyGB$)77pS0?OBT{0Rb*79!nLyIFnzuFK8?5!G^>E{F6l}7*Vk9VGAW`+j0s(D# z+S+FLwkw+=8n~>BTJF;kWX|836TOEHoVLXu#$rkBp?-@WNU`PC`32K*h!&q{@}Xjn zjcH)@5-rheTM>O$V$on}5WU&%UtS$-Z;rNV%E6$P1h1K^>k;|6Wz}Ok+BrC-<~f&X zk&#F)*oyy)cU<^OZ|7{0R#>JcmgjDe?)re9n z8hcUWq-qQFVXCp@;i!p)kCmV{^`1P!C+&McxH}M}mQNkRxm`!=lj!R_$NQ7v}12*&)v*eLwJ&i5J(3jVEs}r(a>f z%Es*;sOeqnl5Zqf|FW+8Yw25EcWjI&Xw)O`-#*ZBKwwzw%b%kHAQv^UJ>Kx3B4e zZ>8-})Et8*WZU>lryx9$)CH$6$M*B3#`XT@Mho{Gd*BP(S)QyEPX;f|EaT03Ks(+i zepCo+4p}ZRL*OgFSfEJ^SIC<|J-!(#KG(rIk^#cpg>O~t zcIzI0v7^u4N#`r^Jj+-IMpejp{QPwIbocr0>etKdFPqIz=c|97D(A}XYOqqO^}Ash z?I%;SZt@?oJd4Ge*HAc@=5(ciC8F(8`KNlz1fi7ig}lpov?`dYX5Imf zGnMGNO4lP^ZN0Lxf6O0Z3Cy0aG{C^tvSah2U(bZ;uY&an*~3&*1)LVRGyY=NF>SEf z*uur(>CDnCXcp`aPgZ||+U4z%V}Mp64w9g-XX1OY_QP~zFIOqhCwzS zu3TEyF#7W*?rjQ-sN@x!Js4@C!1H!SYVM)d`MrL4zkc|3bN_y= z)*;>Xmhp7rzYN*bPBBQeDyX@pmc#|!kW=(|;ldO%)#sA9ozKC_)Wd?Rc%u$U_!Zkp z@2o8YB9@g6vrur>!x_wHJXRvJ1ZT1C@SAX5>ng>P*(c>x%&rkHwlf=4)iVJlz0JY! z;l8xMbmb*E#{DdsFp@PRBlk%}n=^o|vpSv6)4pD^CUo8So3;d|6{GmQZTC7WN%v#R zCkz13s?Kxv;yky?qh4I>es0F`V&=orH!d5ap%6>A$Nxp5hhEi5bbOT0QVY!7U|va$ zM6rlHvr5tacix8-tUL%sHkDbVAr|`PaP38Wj@1?4mZ2^Y2u?26*_%|**YmF$r9&#B zBo3+wVLZ*^nJcTx|0-{nRXe+rb5YIuZUMBhyP`i=g557l=JY7@+cX!F$jxxfuZ8>KdrBSIa~ij&8*JvK5o7o6t3~6c-LR# zDfg&;g0AO7^pFtLj#@JHCtj$YFQ}i z3|S8=vmGw9HHjk{5_XGxzNMWEEK+L>X4KefqFNZ82e^zPoMD@@8n3dQHAbA*Fi+T&yzI0?1gLKrpxQ*X_AvsKjESQrO-r z10)?OUfLPK^B&Xfgty;Y_YtLk9ZwHQobybER;hpK z!Pd$h_Iw#=gR-b&UCT1}pUF{Kz%+QOuLv|fMBK$?&KpcEA$Z-^rY_VTu@)sPzPEA? zH>SRIz!+uIj0#wVY`1S3ao`HiH1}fJpD8<&=)pm=V0x zMT#I&5TRlRXFJb*HVRL}V1v%1!-oEa&#P9G46V3S_aI{+27tB|Yu-M1wZZF$x9y!` z@tUa8a##CGcQO9fdxs(H%o$A~N;_>B<67EOMmpwd6w4I+tq=!6GxXrE{on-$SmTso zM8(q#;8t7}h}3ze)Mf9hzoqy((%l25c+G{>nh%m+%?))2WvK<%LT1&!Voq47DC;Vs zbMzIDpek4fVqyc}9av1_etOH1*`jCgzbN(zgh|`jlkMMF1yDMn>WSzC=oucH0R%^* za%S!m3OqR@XP6`#^cG^)9E>RR%#Wl z!CcZ->vw?2or`5Uy8U8z;i#BwuhHO|pq*?ev&zLJhkq^<-tm7OBj!BqcgG`64YI5b zPtKhxCr6OSRW!pj)nj%}`48NKsb#~uyY)f$btQ_MH`3)d}xR!q5ylIV+d{rLN}moMy4 z?*k}LEfU+f-U4MW;HY=4aNE^KEylV9d@Jt2I~}F03PeT*nIX*!#bAz%v~Wd2TXh4w zp|4$iAZF=3T_P>KL9QLd2?T?8bh2XG?6)h1G)OKQE8UX^#kb&}!8XICidgaT!TL-x_(Xa_yG-^Li%9&2A3LOg>V^dGsqJnEaN? 
zilj$ncdFDcLf%XE;|&pk9Wt&RP#8VX^MrV!`LsE6uRRF!bzMzq`cCuE1D7F{k}~t2 z_MLQe^J$~Wsafix;3JlhyLmi+a9@t#Z!u|Q%qvjkAdW}s8fJVHtBe!s9U{NYI8FGQ zio&qO_{fdF+US**2(t~tEL{}kV|Snw)a8rf$dY1dEg~sGJ$X-V zY4ur2;HT?fjyAs@UVlE-{I@|H9}}yx-)6mNqi5>T)LYUcm3m-qS(Tb&esQsa;)i1O z^%mq^u$4F7+j@^J;EoJ7__}gI$f&KXT*;UA`In+sEDxk@07x+lBlWl+?fi(dKOWPc z_}-q*jx$c5&Q4W`bborGc!Wa|;gAK3+QGxc^Q~>AE0bvq!n)^1R}-hpNk4!3+ z-)2l)w3~I%uY-Sq!fZi#MOET%Z4Nhxf$LXL8{_4y+9_)ra z;Mw+jaZR5Wom5@_#6=)|m5UX7I1v0r<-@5U27k?uHRcxDdKQIC$b^Iv+Q=DN1l&VaL-yy`!Ski=519gy%iO&*)r<=Xo`Sr!P2wV6 zh5%YYRUI>ApJUp_rcWJB=OX@~Exn+V1ucAx+Z*I%l>}JmX)8L~zH?&DJII?3&)jE4 znX@RNTwRqNfw|mUJ(+V69oNswZxPG)=|t5HU7YSY!+S(C7YBm(*VrnxFygI|JLHRP zCtn$7qdih-tEKcbz*A5Xq+naPIaTQjtO*8?)?@8f7q}bZLj_aUP{yF_EtNrAi>iKvFdM)eM3?6S-___!5 zlCC2BbvFLq)$ND1A^7v9P*`1m;S3@1umR98%vI@-P+sB3Cfj>vw`yggwby81?Lw?# z0>r{o2`rv$anw^Bd320)4s#k>a^_B8&N}089=gmLq)gNwg^y#DpbNzR9EtdU_#2krt3il+^tH4|k%ih0f_{1-isZNpoh z&!e*qPnqLq)I+!TCzft-le^ln!xQ-h}3Y|r-a zz(kNaW(Rre*#fx9zAhh}cId2vgj|5msV1nEiyg_8VTpmdc)sZQ7DTtZ~-8hsnE z?)+!02x4>7n<_+x35n(;27$M$pEiN-aDj{Sxj#vLWIQiSh^MCB==PQ|3V8qM>F~6O?%EMI#o!)!TSj!`tS<7E) zd^>&8!2f+a{-Qzbpm7cPRjy-8pB5ckmQl#-s|+hgY#FoW*S-ji`I9=VDjh`XitM7E zx2k$0q2Zj1*@iFW&V#?jp@l(pDqu6@@!SKE*OhY>U=m^m9Pv_Fx7R3Xpq%_xeX($J zs{aLfyHeO`(%|n%pvq3j>R`P|Y<)dZ53$F1*}~sKZ6YSUjk{|9MgPTh^)}p2$xijW zg6$#lyVPuA?F82Cs_W)=eA{pr$kce|(&XT8S0@?4D|fW&O3cNfuZC2%B0}{^&DM+7 zPI(#4NK3b1y|V?5<@^Sqg})#Y)TC9DU4$p((d#WqBOWiiE}^EBxY$bG)e#!3_3{cCYz*p-Ap8b9Q zJ@VjNp|X5f0qO{4Gw{?Vw?L`xtl9V6;E$#ggjGse=R1Ya5PwL`;Oy_@{-H zU1+S!k=4()bORdoe5R+$(?Bi?mn+ZBY2mt`hWm1*{b0+y(axen%A8ImTH$ZFH|Xi)x<&iTC3t*bZ!i-RMs!?rbL321f>ip;j)vU z3KLBlR~3JvOA>jhf*H;Ar^CKC?;65oNalFQlr^PpH~#FX^VP}0-}aXs`Q6RzGV;D` zo;7qTJsdq}O^=7;)!pGa%L~37Z!}&{wi-x%&5dBhuBI*gMJltcxKhMq&a(LaEzdSH znapry=m9KzxDjSW%eXD%fUogawzOOW<34^UmiCP7r`O-k)C*cop*6g!QDbNje6@L9 zZQd`nvaokDwsvnFe8JuDZQB@-(P(AjZ`DeRS&vvxZ`il9KH-Vg{l_=NqQDZ+*3dl3 zPRC5G_mCr-nx;&(eFk~i5}B=SxM&4` zg}>X+YE8MlJ-*SnSJg=vKc9zc*>jcIF1_;Pa+2>M`I2~o$cEA*S!n)Wp$(ZkA>dYN?5!qn+ba>HKZM*3eXc|Gi{)K7BrkNxBS z^|Q!l37{q(ffdidvUP0>X{r!>kBI_r5N;&L5P==f-}zeY8V4oo3_8hZ^cpC)&&zw1 zv!*Gntx#Zo5v8~D9y^;WInr&1<2PB=*C&fTz<2eo$5fHwkqNxiv2xLtCQBl=v;^PW zTGkzYJbTg5>vp|Q{7r{()fViOVF)HZ%O`Yzgmd0OQYwYRArA=7PQ`pDSoirGwW&o) zeH_(KwM{@o?Y1<|FEuzG_6ALb{oqH`Q=R9m2ja(4Bi2rI3z!bKCN$5uR@+QHmSmO0 zxGkCD-C2iV!E!uWt1G#_w~<%}8I6Z+@c)m$;HkmaBZVi;xk+Ww5n6qoHC`?m4=N7s=||^xR0tDudnH{RTCN0gY96M+uUg zR>H4l!V|is%z?NtQKyO~wQ@}T4=^jtK ziO8=Y>|P0_2d8TC{CKRa!t>{|Gmq1?MtQO^YllT7(Ll0h=@x0`36`orP+M7=3foC? zhZy!wjO2;}Uwl2+a4dLOxzP_krcu@YEYcT~pgmvis(6^3Gu@~LFD=8bJlw=y@l^}E zgVArR4Wk<0xe4Coz~2C_FYQr`x0hn!VCe+)w$d{+1Sdxk-6Cvd8jmf{4Hn3O;KYLc z-{3NMkyI$sm5Wi-f^lOst4z+7EK;*f%bg9507u9$;&FslbNH#L#b+~j>jS58M2@AtC@pL{q}9Wql+rz zXy|!=9gV~yO3_QE@6PU&nXkT-f?#!rmJ6)b&`NgIu=D7qShxw+>cjBRj$}EQSUJxY z?FoEqfBUATj${>&t{O9cMJzb&u$&Zao1C5=SmVLmB%St^DA$C+J17m7 z{3RiRml!)x>c#QpzdoM-?We>4`SxrbS6X%dYuXKRIw#b8Sp34A5~Uu8(>MW4v|ectgp<0_H`J?A{n{w%EZ_(cP7 znK?4yV#OspI__}Cl-*NnU}$z+Rpf>wM+psU_3Em1+6wLkzUnF6C~t1u2YV!_NB%=B z6N`Y_K8OG5EvsfnB4YfA717V^{6~Q0_l#Ft&gsNkZrBL~qS%0DDe7K!rn~lGP!kecT4|{wvM|H3eRzO@h zj-N{k#P;!ZjC9V?@wlafkE-|)a<{WWN}@&ac{{(Fj>_fI3MDxe7Byb5ZbLg|V<-3W z_susq$E*BF7Y_e%typkne9gQx2rJr|p(Ekah0YoS<-z)CYeZJ5(d(tsitNgLIoU|? 
z9}X`bk5*47XTjf#=d&K?@&}!a7x=JsE$c`SQ_>y3M)Dp;tMsR4g;jJ9=1EEVBz>(J zs+h=Z@b1t&!Doan*72zQ@x^(EIo4+&b}R%5C~KR{1Q1Y8t}%4IdRbi%nQGim>?vv0 zd@|#&9)e(@Q7Y4atR^dTcYX!TM?S-#Di`_?u`rN0M_T zAXY{JehNo|q--N-m1SYrO}<2g!j7I$*cXDyM~-R*6ClZ9M=Sx})cjPjimUs|g%*RW z9=~N@g(WEaIViBBnZa?oG{k6W+s2Fm3b}R848)~>Ec-;tK;CGuq0`LJepo3AeEsS2 zZa)yLMcCtm$S3kpL2*~YmU|l+k3(wYRnX@}{^`}%*X{eurPsUHh?u_?U-X03+8G-0 zg{fIc_Z~^H$F$n1fIgEP855{jEMrYr z)0kvkBRpa5D052+0nEvw-v0`7=Zrp_IBS*06yGP72KYPT1gBRvj;>yr+t?R54I1ly! ztn$1CxeC8_HDn8ge}cYJIn2p1V}`SBFW6AXtgdZhwN9++)u3klE!+&YHVQkxQDV~5 z-uLSD9gWiayVq3fxEQ zKVl+PJb6Zqs(zut=)c;QedvdunZR2v`)9g}-mEUv5oh;2@t@~zy%ls=yfd4ZzKvi+ z-7V-03LsG4XAaMI#oQ};HeF2hGy=g!E=FUe!=nr^RNH~GYOkT;KbWe`QYR{x z3ovP{ZXYjhxO%b8NeL)RL+9*ezX0NX`^!@WRYT_Q%#8x>blG}3f7WR0-I>yOvS<(K z9POYJbPTLf8;y=yKCfLG9JH-UVKT2cILCX2`#=0?@SSB-`=duI zcJFUDT8aCs7$-rn3}y{{;bHeyd6CUvFW&%jCD7E6U9I!id3IJI4B$qt=amXaHnmcw zh171QQJtIxvZj&c2;<>GYdalU{%^EI+3<-b;|rJ9Of>mQ+Cw+vEN(q{%}ixly3gd1 z7C92N_%IDiJt$V5pcChAsII~%bvULSM&zaNv7BEA3c;zeuZm(?>|UggV>$}_pq)m# zWe0?<%OwT`6IFO|qS%nv{GI%0Yk)@l$b0Ho@m^s~i7WBz`uxO4lzSW()n`~VM&DI3 z_Ok$DqcGYAq&6$qTE&`;p-(iL6E>4P*ip@1DX|=uOlbw>-SyY=ylPt0@0u)SNfY9u zopEyQaDH+m3Vg!_Da&1$TkLnTsU;MV3oKaoY2+!@yKc8A_PO3v{kM;qbo+z?ySV8l zJk3l_{Nxm+YCRvDt+GJ%1W@Vziq4>+G7S@SBCCV@3AI=Dsgj?!AJ(g1)|Vf*jlU0f zr;qn%PY>r^;pjwYwOppOKW=ZJ5J0SL^`E33(D7X4s4d=#|4Mj~P;Dc~rnAr_y*0?v zL!eh!NT4~MwcTxndX;){X~iP)UpiKTTp1zczDr9dKJgSv;%l|#SQIqgn zrIDW~Q`-4UjlX7w$-)9R8xtMREzdWJ1m(R}Hw;tp*rGyVoo)EhAqV_lrC=!?3#@!N z{-W_6L$0s%wc<^mU4HSn(7?xsXV~3x=2ZSR`05GknHQwC2IxOg8a9XKWBSQkxT+Uc zbt_%)*A(<1nelV!BEC|`m>%78V<;|hFW-+aKSXbxxS|x4ISUJoR7{lp+?Ji-Zz4nn zV!O%V?joCt^{^OkUh3?c-1#DW3<va~q8sHFo;9J{{N82W^RSVN)OMeFUh54I z9)<|T4$CxI&I&>a-K3vQKBqGz9FdceD%E+!WYN#|o(2ylJe_g8U1@u2ghx z+j4Gbb55@m(O{tkl<%huS6J9cP*5@}LD18qv8DngQ9cF{kaxb5iUlXDs%(s)k1nkQ z`}R+zf09-zREq!m*oVQu===83fEs)^>MI{3Cv9wEv^GU%a zx1=riGyA_2H2A+`=*a-A#@4J+sISvKkFN~mEzg(r+7f4nuM_X5rGZx;w(E~qS9=fF z`;WIr_gZ*eE3iMF34dQTzMQ{nJYRf!S^dT!KMIyiBB>$3&8U#BWE|NnAx#NVijQ0f z>>WB~l^7oWuZEW-&9Rvceu4)Br@b!!uCob$?HyzH3SncfS}p{p5)%vK zrI}tscJi-~LGVuh`f(t_e6eZd?j_q)*wEYL{>k_3-+I>iO>%e_9>8Yf4F%% zHth_8NCOMV(8GmC$)nJm5A;N|IDtRVY5#(9L?bR=8=hrZ;EEj4$bP0jA^pcg5+>oQI+r2A!!FGZ7Zf+0mv_eqz z;AM64v#-zR?=R=Sd8~f-SigJu-R`;*u9zqio-gY%gl%5Ex}1L_@pS#KDP=W~%rZvV ziV7RDmYlBc;S&_LN@?X#RgJ@I(2gFC8%yqOZ$Dg||84L1KYzLW=}@KjN?AlMdg?Zq z8bB>;^>(tar=f?>Tfml6KtZFhaz^dhmgc*8vqKWKvy0Ke@u2OGwxZ@t?vO$tw8q>iQLqTx9Uxp+uNbYEYoT8bifD$1N4qX6)1x znT3uQ%q*(4|Hyp`3Rj+0s)`2M;x_8fgqRD3LHWL7GPREH9*@$Z*&yyTEfAUWbWWEl z%glA?U-;3xel9(Z>?7!+n3viP{PlCd?u8jhmK~qy_z74Sx7P+$)~V?QR$j`TNxZ53 z0HDj7nf(w#CstZqP8*R5*6Z?hfurUN?Q`>5P4jr5;Pz0yV@NSC0>;l112sK<^v z?buhPHi1}b(5*3dK`ty$xs&!=D|)p>azin7(I9>t`Wq-~;FtDhvwC!gr^ zawkDRRZnZ+U(-=j`!NP8^sJww38N4XhD1yqKW*O_4%c5iuKeoK8h@xT(%pLR%6rMo@`fSaN$E&)qouWMFONEix+6 zUL}DHrcMXQs$T+UUH_#Qr#C1RcsRVh`FyGTv7^6z`uIQor9K-91A$gV;fFj1wyJ?f zQj`<_ZT$6YDT~OF3+(Dyri6p~7N3;-=Ix9p=`&1&3D%ykCm8Q;Qu38mju*_;8>2lZ zIdf-IbLUI3Fu$&@$a~M#IevVuw=Bm~(MbuT^vOVB9RTjI2A1i&qpPX`cOsT2$rws} zO74>v0c0-Kzp(vg|2NG#5++u3!^OOK%DR>;JzVLyt9Er&pv@eUrDs&7I+%42RiLC1 zZn+rm*+-^ABxOg9zjM#4c%!UBvXsWV8Cq0R(D}lZN9`&5_~Ig(k+x~n(&twbyM6e= z>H#Q-)aQl2p`*oPxMgciQD<0zm7p-BjlfZVl95uKh)Tyg64&2#6)@djIew0URXSF3 zu|V5mqLMFR;mT?b0#iiK%X6>3(##5^BP$Xwl22;8oA+oq?zMou)6yFDIQR=kopdQu zm(@38jE*`vhxz^HDdX|0mMF3Vs}5IG{?0uuZL8B$t8%w05i4k$r4~3RnOeG#AK=-o zN*nGzu4F@syp@d074@nF>EMc`%67&teM=rFX!YZkd_|(b%&taKmx-b6$C=~##&fKG z7skZmhdWnRB|ic(4KQWIc70@LN{`#%%(^`d9jQ@yTwzv;_V)dl*MfH)KCh!oRUL20 z*4cE2V~pFLp9a2$R(tojxK>=r=}Bp@$)!2Dp)H+fbk7FF;4fNkXg1AlY0e-cOTG9W 
zbW7vcf%O>V62=HZqPO)NV73FS$w=gh;pAdPNPk*_VrP~O5G#7QgdHMZm%(Hhe@;7P zhv>Ak-e$jKKWbCTQz}$uJhwfzSoN(X7QU2E_%9}$W|Wv2(oOr5GCA}B^@&yD$-Np; zu75ti`l!CpSIULHcOlY!t@5^$ZDOyjrQRW}_m#M{S|5-ulqErl5KR!=Hukg{3#>IB zaX?w_b9D*OB}$*L)Iy?WdglIt>9<*3yna1A`r9x2|M{=ee|*$hpL*TMNIz=qUS7v+ z1uc)AS%e|V?2>3}r|>uEhOMP@7WgWR-iRhJ!lR@w*6Zo3$H~`>_t%qmKH(}e!lId( z|KKwJ?fdtg50TLhzev~hG;W_-Pori+l_vFJo#0n96FWK8vh&P=ejWoUE~=N`nBu7(@>FNmxRXD2zI zY&_1=P&)O<_~ZE3a*d-NQ`<}L*2?O)Q_Op5=eKzBa71>5_1_E~+3RKZYB)o(N~YS% z)uR^L^7^#p$#aY}kC2*XHN`)7R+U~Z_zQX}KFf}AxoR7&ljCBz+R>xZS{CvdM_4+| z2H{+hG~us>(JLY`mPm2Ma~oBCK>T9mu>ir^3VCrb4Z&GfiEqUzei?^prwf0QoIJ&a zS&fLisTqawf5A#HS6(nX-qI1e`s&#K7B#wCnd{_XHzIcvR|Cp{* zcwrT{oz~IguEcdn+w1lmyG-k?*Cl>U-XQp^Omaam_+B{cq$ZbJ+NcOhVjI+xiH(Zj@Nlp#3mMBFj0WZj zSN)X5u#d0UZql2Hq%SCp*45VUG|wFTZA&%LI#{g43wEC8uHM!SBl|r!>N)>+e;Q(9 zy;sFVZ0r%a4)s@@bEcN@m-FvzQ=LJMC@2j6&KkJ1&Do>6K&uz!<*zH11FJQMT+7EJ z)rUG?lx)vo=kO3re5Dtqyx@4Oc)_%}qsI<}n6s48AtM?Ubs;IM91N^$azemg;rLiv zu>Ie}yu|W}HrJv&S~=qLg_bco`mdk&{?|X0d8Gi5^mAUpg0tlXPplCr`F0}zw;7Vi z5wot0JZk1MD`|+j)ueiD~BkQ!X?l?c_%rOFG83P0B zHC+prr>x;SUS zZu`!TS(IkZifNLIh=x14jX7*XYvl#7eaSd-6cf$L`b8socg%Oy&?GR22! z3hln={<9*#EVqq2?0ja0Qw4u6|8e?OvDRa!P`dyT-_yOP=*i3~gw^qK?FN|SEvqRO z!ws^wuM8dB-K(7BO!l(ExRu6y^@ThuxN+=o@pUzVn7Bhry04B`+c{L-C_|7Jyel6t=TyP3YksQ_?Q$gFrMi#|u6i1I4qV-i zo*^l?6FhA`*^Vv0NK0JVw~hAW>g8Xj9|-4xA7;z1C~lS<-H1c6dFapJpdY?oK{66G zKLfsW*zj{Bn%q7}x9_-+2*jSOp3lB9uoz1QS>tYZp57Ggtv4P!0qNd-GUlB)W;_`~ zz?qv2fR)JmFu$|o468Xiizfg|4y=^^3Hj`WgEi$q4d$MhXOGx~oL|aE3Q)|m+EH#k zu9cCcq$VrmBx143ISZZZTUHU3U1>}-P+pW5{PjW^Ccojja*UexxNpz)G2MG4@2@+@ zlaraGXa$}iqG8YU5~X}3o~FV)Tx}s!*$R2@>gLzg_NSAR|NQvrKmMiml2D`bv#qB( zVQ6`#N(MN2`FirI0VbART2?XD>(1Zk_ktCT=bNZ&X4DA9YNwuO#oXin;svwC5^L#> zC!1Fj-8*|gH5sOP3;xEaP!+tp#(i0>pVw+JxAiD=@GAzR+d^=M4auXkI;jxMR!gqD zo-Fmh!s46LFV`new1ZwA69*Fx?{zU29f0jhsN>eWl1Kpy8cZ6jbdz)AvVu#IqZ}FYhxHJ zW~=!xGXR9eGXG2b)xXdZ+Un#WZ((k%IQAQn4b9lVYZ3++9RgR<;J}`@P+yAx0^?`-B4su z_O{klc{K(M;(*y*sSs@^II3 zOXC;{b7zdGEFep3Np9hwvymBv$gz!!RqoaCpntPhyB)+EEj*kYi?06i( zU(I~8$DPu^PKghOQx_$H;N?dV2vjtvplAC1ev^-An$xvy0<`Q z*s#iFg+1npl|AM_+ejD5NrnwpSyfqWfnUn`iw9%9U}(wka#@~K{1bH|FS?_{%ESDs=2yx^L1Q4= zb_4>c7`)uMxdlw=t4bEHTnO{@L6^wt%N>hz%V~S#>U4;ZoSQ9Bjk$p zkilPk@RS+C=ZyYx-kInBj?F-(T)2+%b}Kp)J1urQl?&7==Kkq9-@J!WnDvrCI`PTPrcmm!`ZzC$8cs{1?oFHO3fJoOVz6unUHuJ%yC|qqp?z*H;nn$1O}wm5w0T_k8+04{g68&%@r5N^ zg~bBCF@$#@Ag*puHOT9NpsEQf#+V~_Q1~fPU`mQy_A5Vk8IxDbRRCWizjX5`Z(1$0 z0=~gt2ynH$d(iTHcFI(Wu64{mookT=-XK`nxak)MvN&*wyJx>zv%rjKYb%!q1?8 z>Q_|vi3Ct8qtfx0AMsbZs})yeT=yJqDxloM5X*Wvt1Qe{*Dd5~6)Lsn46)&L`sS%u zD*O%aT6zy$E4NPSw{hIRvEB~WQDmWLoPHX9AUmiU`HDB)Y_kgnIS)^-?&SQdXyL*# zA?6fgTCf<+85qTHEi3)>OF8eUIU4t@w=dA-G4d|TZpj&T@z>hE{a>#}T73`bwwE{^ zJ}*y04_4}E_MLgu=qPDC>A=gq)#~Hf)!zE%;F6SAX-I3Z>K)`v(XvL|9R7jWQ!&OW zuQ>l|)7{jWdu~fn2wn}cis>X5`j7Q%hB+(jY8WR;q7+4mYUH%oFqDRDeksN?rD~t4 z!^Xk?{`uqo^V8n{{8gO!?$23hA|N>1GBcdcI$^Q)lh^7=C80~=Ryxk5c%h#ERI?yI zw~RVLWaPIb1u5%^tuOKh9U7i&>AZsfkFPgjk}Nr{^?scslSz6qO3(NR@PQyeVj%!x z2n;X)X6c#Ut82~5%G#@|_nv{COutd*oXVecQ^r3SAZaIaS zUEnPE3-SssvuaYHbI&1R+!@O@_GE^>Xex6)!4g-J5Qc@EXK4aw4l9&G0cjPZrJZ6m zP$*s`G}*Q-WT26+>SQ2z;ZY!R@$+#0@aNW`1H{E!%vF$GJ5u_U@%Q*f?e)pETdK+v z+4yFqSVUQqk~o||M5^Dg6?C==IrLe_KbNeV^AU6vN1-MH45^wb4X;^IXu{tm;29up z6!t<73F2}Z?}ER{IVv0Su@+^}GKH+ueH*p+O;UPYBTH1Q(+b>Eji>y=wHAXTv7!2L z2h7?7d3C4RBeHt=da znmd3{#lxfP=}a{}H@+~JHI9N%SyZLlZgD4`>un*+mfj8iE-Vz(;EPIgugGUhoAjPV zO;3KWw0>V_gjmpLTdQS7h6-BX;KZwAX8`FE6FZDZmU2UzWx>wBij5rkO??y7X7=N|66i`*vqTE*RQ3Hksg(z8I2CsdieHj{N*Oe@J*gCIUwl2{X`qqaGaqvV5uAI z0;m-)vZ&Cl&dC}g=R3!%hr3#>{_^>;Qj^q_7~J2ObyN>L(qgEj_GIb2)+uI48%4{4#6>vkci3I#F)))9Sj)6SPi> 
zeHe)p3c~WwI5t+EyYORW%6t7UD&e%(CLEyXC6t1VqGm`&KS5O7s>@N{TN_3M(8lQ01_vLtwbF{l8k4f8w40HR ze?e6=-_~}Gr!-$~TlZ{^CPB$>qMI0+l{B~8!Ps5LV>UjIeIasyHSvn=p%xGee@Co! zpHt!JGgXE>8;;xJ#Ml)jww$js;o&4DG3&4@!}D7zGq0XG54;GoTTAs7LxP1(m$zNo zhHDZHX8eVkv{iFzK&Qsf6ok|ok~E{U+}n|mh|1a$5j}olWkMCr)_QSixB*bk5;Ztq)#(hv}VkTyi`43$ctxFHpz-9f(OoHI`0<_-a~Wn*AN_GSVIpj z&pNLV$_E35g}+naF-L|X*??8VJHJD7i3k8<4sNXMu=;qs{;)dLk>(z;Z-cvbk zqiLySQ+-Kg@OFA&lF6A@A<|+I!{!#X6nTDeX_e>B2_QBJqPkj;!1|q>5eZ9nowUVO z@5~%lOsv3n^Yrjc7M{J0T1qt7^%euWJ?ZGbHprs_Yaz(^qzZ-O5eAE8@*3tZqAZ*O zJSF+sC{2>lU?x(8lly>OTKr1=BZs&mV-;t0@cE+$zxnL*U;bX9Vp6L|znMCw5!V2v zV@qN&BFFL!=4#?aGc^eyTQc|D2^l+mDf@+VWfN{dS4pu%`)tuHxY^QXjM%XeMKjWU zA}WmAX4>mu)B9os|DMrCJ?QfweblWqK+Dr|JU$^fxru^MxU%OY57 zF*uioJ3UvmDAhV%%gO0HMc}K~AR?OaN@x~~1si$QS_W{q*R1*WK3EI|WxowdGAlLB zGK=n^bMoNkS%-hBXH-S+Oeky)2EvYQq*>d#;U{>6(UHY@AdLoV!>*7K{ACOBa-4nP zFQP9H95}WgM{@77USZ;*Adn*kujlVNfvBRt6fK5UD{M8?3R`7fPD{9#bzU0(8hjNUq?&9`jxHV^Ddz||Iq>R| z$P3Pf_ljx~Tvw}HMM6%@QfM6X1c~6lTeO^Wp#8#*$?nQkbv!XzKx(09Ry7?`J>kdN zDi_9!sl`)lCKJYc9~{RKRi%EvfA;?1^uyuVhr^BAflE~*v2UZWbrbc>vflA@bFQM+ z7H`P=_qOoY$}(2&AZE{MLq-FiUAN3kG|)l;@VEFCaaNHkOFsqc7dtk!qn-SI1o#%; zZY>f@^cLz1`Pg;d9WMJrD69A?96O%zE$h8`yEG2hry?>W_FF?qv`*X8($Lv!CgiW~ znefs=;aCQy{pKFLXE;N2G2%Hjm#i05L?|_g%v%b7Q;Xy6o+_TD=vlAvA^t3tO-%Cq z`N_!>L$Gw=%iR;Jz)Bt|{TFYS#-e%;=74V(S)GWHIuiVCLEXu}EG~L>0pq(g~wm_cXWb7wiLdFJdjwdKCLi%DBuz-XDnvU>odm* z>@h zWv?Cq(r3g=&tIW8YE6@w^=TjJ6#CWHKPUV}D?v*w3Y}_zqoaYs16%uYShu$B4KtAT zPYKs11Ti$!CGj5W!>t_E*CAujx9RQJ%Fuk-f=8f_-UK_L6c!up7hAOP*D8Z*DL>mB zKRl|{fnS`;5iCZhTiMxt_)bnT5?{;cpT-&&scb?O36~Wui)6pL|fG znnxA~|0wO)1u9dYM+q8Yl>&n67spyz?D)ac7oUFlmw)+wBB)YM!hAvz?H7TVB!$jGzax^)f6B;IFnK zLuNH&(c3(+pOt~mXvUHTH?B9VbMkL!2YRHwibMhd@sJAqwtK?;Do5XAUEr&yyxd=_ zq2~^dZOk2x!t!4SCk@1@x19PIDdbWCQ6M7-4*p65#ZgUg( zwP4OCZ6Sth9K>B6E4FCxS3y|>LPy)Rrx^RTlxsEeo9^hpNv8^L4~G{w(BKMr)ig6z z?)%+J5Nr$WyTc>=DWI4rPFf@!wd7S+>tkKriPAN8X=dg_@zf5yrMC2cMKz;AhiNl* zL8ThmzFRJIoeEii6{?NS3nI%_{Bd=uhWl7AtR0&7qw_}LJ!`$SmS^%`hT~IdvRb@b z!n^?2?ArC>Tp_0^-N%W*#5ksYz)(Pk_$P4=bWwPSvZb6b#oMkmEu{-5ODHoO2yUB< zRbUBh+RFH8{7ofoY~A|s@GkccvI^XfN9*4RGuC8hhI#|#PgW-&G?J9ThEH@EnaQnjVy;SQyNzJn~F1svD>iL56v8CtEPLn zbRr*Dk45>_HtKf6G{>X3IiOc#r|da3IJRyg)k)k^mR>o3H;?wuo~$kui1lJEEm%xU zT#lGes(l)K1;mMJ*0zfF#GVZJ3V$(zjjg&m4q~rxdbg)Zc&;2YJ8fgfz&|B1pyABGss?Y|mQs*a?ru51x7LPe zeJ#jaMBPJBSwGCNVdGV2(o6_#;fFIo7Vxgm>zD_@nTL!hZQQR$dda&=XlM@vP3q;jkTf*1S^1l!b; z<~S?fSvQeH(f7ntzt|z?v6zvUnb4BbGA<_73ed7V@9)h_gY7h@HAlq?%^k4GNTSwG z99Xg)Su&h?8v6m$1lhK&Z)hCs4Ez;`)DfwZBL-t=wI#kP^l@6l2(;A^h*3Kn87K^Z zT28K@Xlul{kB0|YKWS*nqmo8BixW1feIlvGK^vu~9Hj(tRRK&QJ=)Y7 zK3v>Yb)L_k>}#1b1r{bX)#)=$S=Jd#bFA1WIHYN|Jp$Hw7t)r2bQ(%VkS#LL*qZs1 z14~tn@;F;|!kP+>3tR*cjk8(mR9f?~vXz(R9?-#=C!iIm#J-7LDs2jbC_KXrT{xC! 
zA@93AFCl?+fqE;B4ceO3C>Ju+2mb+o)lu8VYA*qdLBOd`vDvyCTZiqw0^>#)vy#!g z>~GC*m1XW^VU863CZUND;h{i39842lbB~#SAAhB8m-5@WfwKx+6{i`mNCmUCs`z&J zWKm-5jmQ8V^h6y?>nv=I4^HnXO9F2QORx8wH0zSs(@Q!>CzqU7@w%@60G{&P`v}tW z%{-)lBUrD2^Vlh5&iJcwulv9=Z+1>EpRUiJ9;?X_EC^PKLfHlt!X&%^f3;O@dBLdt zxxK1OFxQqb%wqbPR<5fsnNG5(zsjG5l#Xm)(%1g^%jfH>NBf&EpFREeKRx*Qf2_ZJ zZXZY0n!6SLYUbPM!IBx1#iOzbx}>MwzD_H6*}YfdnFfe{o43tTqvuX_r5K(0-$KdM z(^h0xl~#7J(_*Ox>Bda(CFM;Hj55|`1#(1I%TZu`$q62v@cEJoatD)3GYVMsx>%?_zE$ObV}4zViHfU-)a|68C|Q ziWacWB`#v*4o^1c1ag#BvlIsb0oG;%DDEUvmM2p2F48IqWDQS{}#G+ z%^8km{mUXsXc+^SjYLGPL^s)j(+&Nji%arL5K_kFCS>l@WSdY1Mz zcGu6>=Z_Dy&Y=3ahtFi9zd$8%)FJr{L>r{tv_ljJ)E5ywT)pYqV2w@(7b>0{{Y%G1 zo{TbR3-ZqR3r~=l%e)0!=1lq#hHEUb&9@gB zzIesNx8cuYD`qYoO0)g+>G8?qqs^0*(vhxqHnK}A{6-&A`LnDimHJo6_Ij>%&_;>t z=GXK;FAaixoO-O6Svb3GW<5 zvh24)Re9S%Sdfve-pb&JHriWRCkllVVKr3au_M$I@{+}d=T|@}MOblP3c-rHEm*Ww zM?jf14UL!vaT&3XWIeXY#Z3vt&h2%>&a6>Dc)+o1vdVo}Zyl~8da**CI+p#fpA}P5 zMMpp#qW2&;k%}gN=3(>XMU#TYbzBo~FbeI=TpAq*?kb89i=Y;kilwK$!HA@7YXR1U z#m~p8YBAHj1Z_nWK(Mw{p;Veus@bHn+(up=3%6A53Sk?^l_SS{H?f`HJ+Cq`<9v$+@3)J{7b}jXix~p-vTyptoNq!)mc7&b+0^KdPVq1*mUt4%`Dv&VWp#A30 zmikI*O?+3|GkvRhTJqKI@P+>yfT*1af5t_~-PmfjOKs6?KYx0pfCrhQuV0)7e;vxg zv*0I~oxxua7W{>Zn}?tU8-**w@#M7%bSQ*U$&4kXXLSZ_$O2y};EHxWe(>z@&kuKg z|Hbj=JGakI^|mP;NnU)da1CAP>-5#uacaa9%UbI?3s`mg6QVKP;-FUv@%A=Z%d?HutVc(bcm{p1l3+h zzQwpOUt2g*=I$t-Wawl*5HnmFan9jeiU#+t-|T8Ag<=LWhb~9km4Wwq-#*^~wPX`d zkZL7YZAM=9bugzXV7|P@cy)C!mz2*p2WR89kAuH=P*RgnFiqfrwzTSh2fsOVAP8JEJYnAUSl|x|U1u&8#UTKfzKkHmKzkpa#{5A)g6=9i zDagxyZxQ~Dyy9%aj}g2=r!f;&;@?Dco2FGp4pnr!LF z0#k)iow?W!TW}dYx^Q36@1W?4g0|mzEty27&NreX7v6btBE7&0OSmGQ!IFs3ed4QC z*EP6^iY?`~#2ehf;#4+`_#>iQiN~swhc&fSU_GeL_q8LwNITT_-_jNfbCW$Z>9BIG zYAsa27ge`r2zG2{RN-c1^2t6{SX`E?Ge<;o=c?qBEF`igz~X!tS26#E{sVs{a_eBt zXOIQ`uC+WWV(HtA`950Ww*u57g0+G>SiNa!^nG?hBDcgXrp*cG?K1T&+xd+GFdq$q4A)nsWbKA9T~2UEs^X_r8sr$_ew`S zWE!1tMdm}9q2PM}K{SYDVGOa&16ulmakfGuDnEU;zir`f*mA6S;O{8%tQ|@hHX?_( z@bWm(zNjo~GzjmcNOq+x@YgO~(OMk4wkl^wMrKDilV>EyW_vAeO%MN z)#Al-H4l(?JM!5oEwRdAD7tOOR9AfuU4rsUEVMvh>BEJ>@%au4G$?!MKo!szO?CtMiYfqaz(A6 z>bX?5m(q(D3vl0a!7O!)WxQDU4Qi?O#tVP{7OzVBiufF)J8^r>aK2IAa0yFd{?Say z>NP46AK17VoJvgtyqOwDNBgX`tgzsfJTj1%xAk_WLAW#{KDeSNZb(^Sry#Oy@{W^V z8W`GfH?J*pu-}@LeV6gK<+2G%^Oxg2g=p zky6;&f-&qrV@I6RBJ>viCWtpa#G(;m2wUQ^sM)riG5Ym}V0w6XV{SZ&-OppNbpCiK z2j{PtXCQN+q%XdX_bGH9u6(J7(TiPD(#cSk27_6ZtBZ%Xve6ysL}2jk>QqfX@93&v zE?O-Zd*aN|iF`p4)wdKg*Z{F$Ej)c5Ow%Da>Th*b_Ya??v=NaJ_ZmY7Bo5)TQ{z{cb zPO!OKI&W3uk*C;&F4gcj7djl>UhkZrJvxv_jA@L2!N4j~?WjwQZ_!9eKGnl!p`qob zFLP#as`0e&dJJ)mqclWC1yA~>?oh*|IIC%uLr}Qyzj&0m4`XL@gr)@VmaYD^I?UQh zsYAv5XD0J92!YzyLAikSG7f~6795ROY)33z|BgpD`Y(EMuUfdw>DZ-Zw<^jlQh&Eih04CN9N!etx^URU;OWJSS z9M8jweUdL#f9l9!n`3*gg2m z&hc0K`l;29l2$WD#Bwath8TE#;~1|oF4lvJbB5wTxAOCDp>Q+dma>{Un9Ldu&;cH< z_cSLLIfxiV$n8#LytI`VHfDm(nMmW#RIk^|n=hC2wVrh~g5e%WciQGbjw|p7YM541 zs6A<=sG=42+zS!~c}Fcuj3?e^+|f+clkAKwd_a>>##>bxN^@j2Gc$ozHXsXvQ@~2W z#^m9aHCr=BcNljYm}}T=GjfBki)f`;3r6#$g7oa|m42GP?4Rv6LY-@;q><8m)1L?O zfZx2_RUJzWnd1h#0lO0kmki@R)Dq-gg**|D+L}j|Zi}0K8YvLA@=&bB%neyISOYBi1-Wk?GMfG^jHrP|cJgRJHXu`Yai^jgKBd&30YY z$T(@r$sQ{1k@+wpe|$;ox;+!Nh`I^3K(KgDCId!n-UQ@g+aON(D1Lw4(?-sap5b8J zRy)u>)ZGlnI8s^_k)aw=Ge+K+-9@chn;GRJ&lOg=IV-~5#zPf7D>R)%I&;P zGrdL}b+Y?8T`IyC`(jpTtC5xSYb0x6d8;{-mudX<0{3cK4*2lFK^8?iz1Y#ZND8MZ zuN>N$q0=10+O8D}?Zlp3b@Niaw(^Xk2#9;24%jG{tB{n0uma!fC+oAX4;1Ho@TW)n ze^Gho1IdR*mds431O25a4tcV5Rn1j>OGFM&=IV#(-|=*AtVEi(Gm zdy|hIj6g zEgN{vqc=y#Zigpnj+10&0+EfC)=XOujRiaUuQG#f_tt7o5t`~bqxn)1nkXseE&4Us z4N}Wy=oSkznsP-|rGlkE*~)nSE&NTK)r`MLr_Aui-vPvSi>q-C-@>>=yR4YuSEhs#XRGr^~y=IYSlz10wMmMLm3qunrje;=V$1cS2`#jBR 
zg1C^FY2HvZzqtpZkhNuj({V?|!+&iIjs)@*>TNbIUF?RN-j zTRl8da_iaY!+j+oTKSEhhzCcUO?FFq5*`9|*LX>ZtE6c$1gx5Pd%wqPV3v+Fg>SJa zcd6Yya~r{gnambf@lOnBY|4sy>uv%e@o!L&tiI&NVy{UNZV7%26ER*+_Er0buXJ%aAcOA1N{gdRQ8s82V>VBB*fwq{5+n8Mx>{_2HttuQIkCj7P72ryGR}xHgA=Wltm94Qm zc^QP16jEgVNGxGeHPf)JTk+k9f*V=l|ei`sr3jv=!K4D$pi>E4uVJ|QlC45EA z%y18E(Y^Ai>&6>oo0EXHY-edo11GkrAQ?-!-|U@ywR7~BN2|{tZyr2<^h)!zG(igSTHmt3VfT&cp_NC^ZLAv%KDPAD<2WU3kTb*~@L)5FMXy;-5>04d#}2IK}96JVkH5)6(1J;Z6Rp%*6_nk~XR* z>Mxqj(K@nu_x>zRJ8nziECQaT`J((f1gitLWOf$*>L=&d@dP=uk6-w&au7=x5CfM% zAU3O)yKANWs^*5^qpTCGgj-QV@m)5r)95q;S}7ZRMuQci^HxmWZc5*ulliMI)WOzI zQIqWz(&$I3zl+F4xv`VzDmC7medICnf5qh%NeOQS_*-^tYp*osCmaS^ZvJdwEe;bP z9>A>~_?2I^Q-Hauj)ym5Z)Xes$}XW-jo??dN(odF6!o@#;mNDQ7x5JuTuc*uk@t_) ztJg9NE0b{kFa`_0N)>%XGPLitDnGldTD5{u*8wap+}HiH>fpR`f9o4VMDU#Xh}e$wGO4c04vx`DpO&jrALW%O8@GBMQ-y(ln?5~9zRCYx$JGyXD{3g442+`e5KpJ%0; z*~qobcD+v|Mtg)gj*i?`cMvljPR*H2=|FjD+{|aT(w$@FaIiXvyw%UlrPwPvJJR=W zpf5$+x_0Tdwkj`dx%c7uJ)@|D>Pygn!QVpRSw}u{UQ)$EfV_#+SOS(Z#bvm0}D2ZeATr4xIb~ zv0I{O2uIRFS?w19N^I0hBr0+*$lK?~^6)(so(r-wNkYg)v(R|vM!Y}FMl7gR5s11# zG0X~^3{M?R7mjC{0d!U=Kf~x`LvA3Bg)^EjND7yRx|{zLd&$A#KLQ$bx4{7hOcSlG z<5C3Fy+N_WfJIR0`DwXv>9nQaEVuB^Ly*X6&Rya-acCnNP3P;LA(k0`&12zy@YrF` z^dR2zB=YY#M?6!no%}C-yFS@HFff`O__O*!mJ`h%F=w0OQtersYN(%udeLf2x1CnF z8~)t-+FlCEXpqi4p#tVtrX_Nh~NvJnO&xKK9?K zk?x@#{(|Ev^GJoT$O7&~3FEWVCGy7|Q7rfE?a|v;t9N%=A#xnSB2q+sZLw~ZuPX5s z7mqh5500LF^3|hX|3$`Ut*l*s^QyUucjJ3}2kwel3ocP`nT_J=I-$lfet*Chs|%Fd zb1#}mEF_KcTi`ocDz*llp&a7^lV^@KhcElERn!{0++lw6$f(Sgmg|D=9FKM^fdudE zKzzQvUx2r@guru={n+Y~Tt()Z_uD?~=rUrfS{t=38dTL}kkkKi=j!T(7edku5%^M^ z*(hvhgb%8L=FDUnoDn#~Ix_zgHLW;mw~FklRSRu@#(Sdaq({7PfEIIad|j!}IaB+s zcLaYy-VU$Q7-6H<4gzGfZ{n}q=TL|GNAK4CoY@kN4cd0Sop>>}B)9Z%Mh(3fJ_hea z=k2*PbDqWij3v4Gl6^ziQFT015i4{%d}Z;&9E~Tp+_(;Aw#hd>)9l;5t01d#-By5> z52|TxP&M+2(0rZrw|VpC)J^rA)eOE;lY_i7%Ga7@Cm&pBIt@G>fl}UCzfnfQF^S)O~`jhQfedjnO5 zHZ_5Tz3RtuQn=~J$X$|q`s3k^+6Q4`7nklg^vy-2R=#Rw`Q?dyj%+(R&F|Lk$ePmq z^0gMBs!6ERsMskU;@%*tp|o<>Ibrr{N1{T61Oc>#7T_CCdSy|?qAu87kXIif{rBjl zn!e6tRpWvU*0Xxn)ZmaDVPE~9hnnKbW*$z?xWG%{oVt}I%xE!K6rp0a>I&P0qyM_= zT;(94r&S-o2|n~5{1DxwnN@U|(EGrBK_T2#JS}2R7)WWp3;u!^rMWkgF$4#H-|oLx zQ+Zpp!s{*Tw-)qyRZ*t!vCd0XZ|H@Uf23L*SrqupfzV!%?%IhVhpz;?M&82;#7a2U zQej}Wv(0gvSezVmp+evL*H_O@&%Zfb{pI;L|MsUZ{`L1-SkUh8$g724lm1Jjbd5zF zw^6ZWNIbBay@@Ph_Vx(6$3|3u0|VmsDLYl_XyKu#{;VjznQt%sPjBl@g3Gg9WG)R| z%ws*U*z^(_=ST^tQ~XXEhz3