Compare commits

...

8 Commits

Author SHA1 Message Date
wxchen
68e8ba7901 Fit a plane to the points,
Calculate the rotation between the plane normal and the Z axis
	new file:   src/maintain/scripts/maintain.py
	deleted:    src/maintain/scripts/test copy.py
	modified:   src/maintain/scripts/test.py
	modified:   src/yolov5_ros/launch/yolov5.launch
2023-05-10 16:25:33 +08:00
wxchen
d3956cbec1 add compute_plane_normal
modified:   src/maintain/scripts/test.py
2023-05-09 21:30:39 +08:00
wxchen
5fbfed3ab3 deleted: src/yolov5_ros/launch/yolov5_d435.launch 2023-05-09 16:49:52 +08:00
wxchen
227162d767 Add initial conditions
modified:   src/maintain/scripts/test.py
2023-05-09 16:47:18 +08:00
wxchen
7045f42ceb Update attitude calculation method
modified:   src/maintain/scripts/test copy.py
	modified:   src/maintain/scripts/test.py
2023-05-09 16:32:38 +08:00
wxchen
5ee37caf7c modified: .gitignore 2023-05-09 01:19:58 +08:00
wxchen
5e8f1d6ab0 rebase
new file:   .gitignore
	new file:   README.md
	new file:   src/detection_msgs/CMakeLists.txt
	new file:   src/detection_msgs/msg/BoundingBox.msg
	new file:   src/detection_msgs/msg/BoundingBoxes.msg
	new file:   src/detection_msgs/package.xml
	new file:   src/geometry2/geometry2/CHANGELOG.rst
	new file:   src/geometry2/geometry2/CMakeLists.txt
	new file:   src/geometry2/geometry2/package.xml
	new file:   src/geometry2/test_tf2/CHANGELOG.rst
	new file:   src/geometry2/test_tf2/CMakeLists.txt
	new file:   src/geometry2/test_tf2/mainpage.dox
	new file:   src/geometry2/test_tf2/package.xml
	new file:   src/geometry2/test_tf2/test/buffer_client_tester.launch
	new file:   src/geometry2/test_tf2/test/buffer_core_test.cpp
	new file:   src/geometry2/test_tf2/test/static_publisher.launch
	new file:   src/geometry2/test_tf2/test/test_buffer_client.cpp
	new file:   src/geometry2/test_tf2/test/test_buffer_client.py
	new file:   src/geometry2/test_tf2/test/test_buffer_server.cpp
	new file:   src/geometry2/test_tf2/test/test_convert.cpp
	new file:   src/geometry2/test_tf2/test/test_convert.py
	new file:   src/geometry2/test_tf2/test/test_message_filter.cpp
	new file:   src/geometry2/test_tf2/test/test_static_publisher.cpp
	new file:   src/geometry2/test_tf2/test/test_static_publisher.py
	new file:   src/geometry2/test_tf2/test/test_tf2_bullet.cpp
	new file:   src/geometry2/test_tf2/test/test_tf2_bullet.launch
	new file:   src/geometry2/test_tf2/test/test_tf_invalid.yaml
	new file:   src/geometry2/test_tf2/test/test_tf_valid.yaml
	new file:   src/geometry2/test_tf2/test/test_utils.cpp
	new file:   src/geometry2/tf2/CHANGELOG.rst
	new file:   src/geometry2/tf2/CMakeLists.txt
	new file:   src/geometry2/tf2/include/tf2/LinearMath/Matrix3x3.h
	new file:   src/geometry2/tf2/include/tf2/LinearMath/MinMax.h
	new file:   src/geometry2/tf2/include/tf2/LinearMath/QuadWord.h
	new file:   src/geometry2/tf2/include/tf2/LinearMath/Quaternion.h
	new file:   src/geometry2/tf2/include/tf2/LinearMath/Scalar.h
	new file:   src/geometry2/tf2/include/tf2/LinearMath/Transform.h
	new file:   src/geometry2/tf2/include/tf2/LinearMath/Vector3.h
	new file:   src/geometry2/tf2/include/tf2/buffer_core.h
	new file:   src/geometry2/tf2/include/tf2/convert.h
	new file:   src/geometry2/tf2/include/tf2/exceptions.h
	new file:   src/geometry2/tf2/include/tf2/impl/convert.h
	new file:   src/geometry2/tf2/include/tf2/impl/utils.h
	new file:   src/geometry2/tf2/include/tf2/time_cache.h
	new file:   src/geometry2/tf2/include/tf2/transform_datatypes.h
	new file:   src/geometry2/tf2/include/tf2/transform_storage.h
	new file:   src/geometry2/tf2/include/tf2/utils.h
	new file:   src/geometry2/tf2/index.rst
	new file:   src/geometry2/tf2/mainpage.dox
	new file:   src/geometry2/tf2/package.xml
	new file:   src/geometry2/tf2/src/buffer_core.cpp
	new file:   src/geometry2/tf2/src/cache.cpp
	new file:   src/geometry2/tf2/src/static_cache.cpp
	new file:   src/geometry2/tf2/test/cache_unittest.cpp
	new file:   src/geometry2/tf2/test/simple_tf2_core.cpp
	new file:   src/geometry2/tf2/test/speed_test.cpp
	new file:   src/geometry2/tf2/test/static_cache_test.cpp
	new file:   src/geometry2/tf2_bullet/CHANGELOG.rst
	new file:   src/geometry2/tf2_bullet/CMakeLists.txt
	new file:   src/geometry2/tf2_bullet/include/tf2_bullet/tf2_bullet.h
	new file:   src/geometry2/tf2_bullet/include/tf2_bullet/tf2_bullet/tf2_bullet.h
	new file:   src/geometry2/tf2_bullet/mainpage.dox
	new file:   src/geometry2/tf2_bullet/package.xml
	new file:   src/geometry2/tf2_bullet/test/test_tf2_bullet.cpp
	new file:   src/geometry2/tf2_eigen/CHANGELOG.rst
	new file:   src/geometry2/tf2_eigen/CMakeLists.txt
	new file:   src/geometry2/tf2_eigen/include/tf2_eigen/tf2_eigen.h
	new file:   src/geometry2/tf2_eigen/mainpage.dox
	new file:   src/geometry2/tf2_eigen/package.xml
	new file:   src/geometry2/tf2_eigen/test/tf2_eigen-test.cpp
	new file:   src/geometry2/tf2_geometry_msgs/CHANGELOG.rst
	new file:   src/geometry2/tf2_geometry_msgs/CMakeLists.txt
	new file:   src/geometry2/tf2_geometry_msgs/conf.py
	new file:   src/geometry2/tf2_geometry_msgs/include/tf2_geometry_msgs/tf2_geometry_msgs.h
	new file:   src/geometry2/tf2_geometry_msgs/index.rst
	new file:   src/geometry2/tf2_geometry_msgs/mainpage.dox
	new file:   src/geometry2/tf2_geometry_msgs/package.xml
	new file:   src/geometry2/tf2_geometry_msgs/rosdoc.yaml
	new file:   src/geometry2/tf2_geometry_msgs/scripts/test.py
	new file:   src/geometry2/tf2_geometry_msgs/setup.py
	new file:   src/geometry2/tf2_geometry_msgs/src/tf2_geometry_msgs/__init__.py
	new file:   src/geometry2/tf2_geometry_msgs/src/tf2_geometry_msgs/tf2_geometry_msgs.py
	new file:   src/geometry2/tf2_geometry_msgs/test/test.launch
	new file:   src/geometry2/tf2_geometry_msgs/test/test_python.launch
	new file:   src/geometry2/tf2_geometry_msgs/test/test_tf2_geometry_msgs.cpp
	new file:   src/geometry2/tf2_geometry_msgs/test/test_tomsg_frommsg.cpp
	new file:   src/geometry2/tf2_kdl/CHANGELOG.rst
	new file:   src/geometry2/tf2_kdl/CMakeLists.txt
	new file:   src/geometry2/tf2_kdl/conf.py
	new file:   src/geometry2/tf2_kdl/include/tf2_kdl/tf2_kdl.h
	new file:   src/geometry2/tf2_kdl/include/tf2_kdl/tf2_kdl/tf2_kdl.h
	new file:   src/geometry2/tf2_kdl/index.rst
	new file:   src/geometry2/tf2_kdl/mainpage.dox
	new file:   src/geometry2/tf2_kdl/package.xml
	new file:   src/geometry2/tf2_kdl/rosdoc.yaml
	new file:   src/geometry2/tf2_kdl/scripts/test.py
	new file:   src/geometry2/tf2_kdl/setup.py
	new file:   src/geometry2/tf2_kdl/src/tf2_kdl/__init__.py
	new file:   src/geometry2/tf2_kdl/src/tf2_kdl/tf2_kdl.py
	new file:   src/geometry2/tf2_kdl/test/test.launch
	new file:   src/geometry2/tf2_kdl/test/test_python.launch
	new file:   src/geometry2/tf2_kdl/test/test_tf2_kdl.cpp
	new file:   src/geometry2/tf2_msgs/CHANGELOG.rst
	new file:   src/geometry2/tf2_msgs/CMakeLists.txt
	new file:   src/geometry2/tf2_msgs/action/LookupTransform.action
	new file:   src/geometry2/tf2_msgs/include/foo
	new file:   src/geometry2/tf2_msgs/mainpage.dox
	new file:   src/geometry2/tf2_msgs/msg/TF2Error.msg
	new file:   src/geometry2/tf2_msgs/msg/TFMessage.msg
	new file:   src/geometry2/tf2_msgs/package.xml
	new file:   src/geometry2/tf2_msgs/srv/FrameGraph.srv
	new file:   src/geometry2/tf2_py/CHANGELOG.rst
	new file:   src/geometry2/tf2_py/CMakeLists.txt
	new file:   src/geometry2/tf2_py/package.xml
	new file:   src/geometry2/tf2_py/setup.py
	new file:   src/geometry2/tf2_py/src/python_compat.h
	new file:   src/geometry2/tf2_py/src/tf2_py.cpp
	new file:   src/geometry2/tf2_py/src/tf2_py/__init__.py
	new file:   src/geometry2/tf2_ros/CHANGELOG.rst
	new file:   src/geometry2/tf2_ros/CMakeLists.txt
	new file:   src/geometry2/tf2_ros/doc/conf.py
	new file:   src/geometry2/tf2_ros/doc/index.rst
	new file:   src/geometry2/tf2_ros/doc/mainpage.dox
	new file:   src/geometry2/tf2_ros/doc/tf2_ros.rst
	new file:   src/geometry2/tf2_ros/include/tf2_ros/buffer.h
	new file:   src/geometry2/tf2_ros/include/tf2_ros/buffer_client.h
	new file:   src/geometry2/tf2_ros/include/tf2_ros/buffer_interface.h
	new file:   src/geometry2/tf2_ros/include/tf2_ros/buffer_server.h
	new file:   src/geometry2/tf2_ros/include/tf2_ros/message_filter.h
	new file:   src/geometry2/tf2_ros/include/tf2_ros/static_transform_broadcaster.h
	new file:   src/geometry2/tf2_ros/include/tf2_ros/transform_broadcaster.h
	new file:   src/geometry2/tf2_ros/include/tf2_ros/transform_listener.h
	new file:   src/geometry2/tf2_ros/package.xml
	new file:   src/geometry2/tf2_ros/rosdoc.yaml
	new file:   src/geometry2/tf2_ros/setup.py
	new file:   src/geometry2/tf2_ros/src/buffer.cpp
	new file:   src/geometry2/tf2_ros/src/buffer_client.cpp
	new file:   src/geometry2/tf2_ros/src/buffer_server.cpp
	new file:   src/geometry2/tf2_ros/src/buffer_server_main.cpp
	new file:   src/geometry2/tf2_ros/src/static_transform_broadcaster.cpp
	new file:   src/geometry2/tf2_ros/src/static_transform_broadcaster_program.cpp
	new file:   src/geometry2/tf2_ros/src/tf2_ros/__init__.py
	new file:   src/geometry2/tf2_ros/src/tf2_ros/buffer.py
	new file:   src/geometry2/tf2_ros/src/tf2_ros/buffer_client.py
	new file:   src/geometry2/tf2_ros/src/tf2_ros/buffer_interface.py
	new file:   src/geometry2/tf2_ros/src/tf2_ros/static_transform_broadcaster.py
	new file:   src/geometry2/tf2_ros/src/tf2_ros/transform_broadcaster.py
	new file:   src/geometry2/tf2_ros/src/tf2_ros/transform_listener.py
	new file:   src/geometry2/tf2_ros/src/transform_broadcaster.cpp
	new file:   src/geometry2/tf2_ros/src/transform_listener.cpp
	new file:   src/geometry2/tf2_ros/test/listener_unittest.cpp
	new file:   src/geometry2/tf2_ros/test/message_filter_test.cpp
	new file:   src/geometry2/tf2_ros/test/message_filter_test.launch
	new file:   src/geometry2/tf2_ros/test/time_reset_test.cpp
	new file:   src/geometry2/tf2_ros/test/transform_listener_time_reset_test.launch
	new file:   src/geometry2/tf2_ros/test/transform_listener_unittest.launch
	new file:   src/geometry2/tf2_sensor_msgs/CHANGELOG.rst
	new file:   src/geometry2/tf2_sensor_msgs/CMakeLists.txt
	new file:   src/geometry2/tf2_sensor_msgs/include/tf2_sensor_msgs/tf2_sensor_msgs.h
	new file:   src/geometry2/tf2_sensor_msgs/package.xml
	new file:   src/geometry2/tf2_sensor_msgs/setup.py
	new file:   src/geometry2/tf2_sensor_msgs/src/tf2_sensor_msgs/__init__.py
	new file:   src/geometry2/tf2_sensor_msgs/src/tf2_sensor_msgs/tf2_sensor_msgs.py
	new file:   src/geometry2/tf2_sensor_msgs/test/test.launch
	new file:   src/geometry2/tf2_sensor_msgs/test/test_tf2_sensor_msgs.cpp
	new file:   src/geometry2/tf2_sensor_msgs/test/test_tf2_sensor_msgs.py
	new file:   src/geometry2/tf2_tools/CHANGELOG.rst
	new file:   src/geometry2/tf2_tools/CMakeLists.txt
	new file:   src/geometry2/tf2_tools/mainpage.dox
	new file:   src/geometry2/tf2_tools/package.xml
	new file:   src/geometry2/tf2_tools/scripts/echo.py
	new file:   src/geometry2/tf2_tools/scripts/view_frames.py
	new file:   src/maintain/CMakeLists.txt
	new file:   src/maintain/launch/maintain.launch
	new file:   src/maintain/package.xml
	new file:   src/maintain/scripts/test copy.py
	new file:   src/maintain/scripts/test.py
	new file:   src/vision_opencv/README.rst
	new file:   src/vision_opencv/cv_bridge/CHANGELOG.rst
	new file:   src/vision_opencv/cv_bridge/CMakeLists.txt
	new file:   src/vision_opencv/cv_bridge/cmake/cv_bridge-extras.cmake.in
	new file:   src/vision_opencv/cv_bridge/doc/conf.py
	new file:   src/vision_opencv/cv_bridge/doc/index.rst
	new file:   src/vision_opencv/cv_bridge/doc/mainpage.dox
	new file:   src/vision_opencv/cv_bridge/include/cv_bridge/cv_bridge.h
	new file:   src/vision_opencv/cv_bridge/include/cv_bridge/rgb_colors.h
	new file:   src/vision_opencv/cv_bridge/package.xml
	new file:   src/vision_opencv/cv_bridge/python/CMakeLists.txt
	new file:   src/vision_opencv/cv_bridge/python/__init__.py.plain.in
	new file:   src/vision_opencv/cv_bridge/python/cv_bridge/__init__.py
	new file:   src/vision_opencv/cv_bridge/python/cv_bridge/core.py
	new file:   src/vision_opencv/cv_bridge/rosdoc.yaml
	new file:   src/vision_opencv/cv_bridge/setup.py
	new file:   src/vision_opencv/cv_bridge/src/CMakeLists.txt
	new file:   src/vision_opencv/cv_bridge/src/boost/README
	new file:   src/vision_opencv/cv_bridge/src/boost/core/scoped_enum.hpp
	new file:   src/vision_opencv/cv_bridge/src/boost/endian/conversion.hpp
	new file:   src/vision_opencv/cv_bridge/src/boost/endian/detail/intrinsic.hpp
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/detail/_cassert.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/detail/endian_compat.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/detail/test.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/library/c/_prefix.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/library/c/gnu.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/make.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/android.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/bsd.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/bsd/bsdi.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/bsd/dragonfly.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/bsd/free.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/bsd/net.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/bsd/open.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/ios.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/os/macos.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/other/endian.h
	new file:   src/vision_opencv/cv_bridge/src/boost/predef/version_number.h
	new file:   src/vision_opencv/cv_bridge/src/cv_bridge.cpp
	new file:   src/vision_opencv/cv_bridge/src/module.cpp
	new file:   src/vision_opencv/cv_bridge/src/module.hpp
	new file:   src/vision_opencv/cv_bridge/src/module_opencv2.cpp
	new file:   src/vision_opencv/cv_bridge/src/module_opencv3.cpp
	new file:   src/vision_opencv/cv_bridge/src/pycompat.hpp
	new file:   src/vision_opencv/cv_bridge/src/rgb_colors.cpp
	new file:   src/vision_opencv/cv_bridge/test/CMakeLists.txt
	new file:   src/vision_opencv/cv_bridge/test/conversions.py
	new file:   src/vision_opencv/cv_bridge/test/enumerants.py
	new file:   src/vision_opencv/cv_bridge/test/python_bindings.py
	new file:   src/vision_opencv/cv_bridge/test/test_compression.cpp
	new file:   src/vision_opencv/cv_bridge/test/test_endian.cpp
	new file:   src/vision_opencv/cv_bridge/test/test_rgb_colors.cpp
	new file:   src/vision_opencv/cv_bridge/test/utest.cpp
	new file:   src/vision_opencv/cv_bridge/test/utest2.cpp
	new file:   src/vision_opencv/image_geometry/CHANGELOG.rst
	new file:   src/vision_opencv/image_geometry/CMakeLists.txt
	new file:   src/vision_opencv/image_geometry/doc/conf.py
	new file:   src/vision_opencv/image_geometry/doc/index.rst
	new file:   src/vision_opencv/image_geometry/doc/mainpage.dox
	new file:   src/vision_opencv/image_geometry/include/image_geometry/exports.h
	new file:   src/vision_opencv/image_geometry/include/image_geometry/pinhole_camera_model.h
	new file:   src/vision_opencv/image_geometry/include/image_geometry/stereo_camera_model.h
	new file:   src/vision_opencv/image_geometry/package.xml
	new file:   src/vision_opencv/image_geometry/rosdoc.yaml
	new file:   src/vision_opencv/image_geometry/setup.py
	new file:   src/vision_opencv/image_geometry/src/image_geometry/__init__.py
	new file:   src/vision_opencv/image_geometry/src/image_geometry/cameramodels.py
	new file:   src/vision_opencv/image_geometry/src/pinhole_camera_model.cpp
	new file:   src/vision_opencv/image_geometry/src/stereo_camera_model.cpp
	new file:   src/vision_opencv/image_geometry/test/CMakeLists.txt
	new file:   src/vision_opencv/image_geometry/test/directed.py
	new file:   src/vision_opencv/image_geometry/test/utest.cpp
	new file:   src/vision_opencv/opencv_tests/CHANGELOG.rst
	new file:   src/vision_opencv/opencv_tests/CMakeLists.txt
	new file:   src/vision_opencv/opencv_tests/launch/pong.launch
	new file:   src/vision_opencv/opencv_tests/mainpage.dox
	new file:   src/vision_opencv/opencv_tests/nodes/broadcast.py
	new file:   src/vision_opencv/opencv_tests/nodes/rosfacedetect.py
	new file:   src/vision_opencv/opencv_tests/nodes/source.py
	new file:   src/vision_opencv/opencv_tests/package.xml
	new file:   src/vision_opencv/vision_opencv/CHANGELOG.rst
	new file:   src/vision_opencv/vision_opencv/CMakeLists.txt
	new file:   src/vision_opencv/vision_opencv/package.xml
	new file:   src/yolov5_ros/CMakeLists.txt
	new file:   src/yolov5_ros/LICENSE
	new file:   src/yolov5_ros/README.md
	new file:   src/yolov5_ros/launch/yolov5.launch
	new file:   src/yolov5_ros/launch/yolov5_d435.launch
	new file:   src/yolov5_ros/package.xml
	new file:   src/yolov5_ros/src/detect.py
	new file:   src/yolov5_ros/src/yolov5/.dockerignore
	new file:   src/yolov5_ros/src/yolov5/.gitignore
	new file:   src/yolov5_ros/src/yolov5/.pre-commit-config.yaml
	new file:   src/yolov5_ros/src/yolov5/CONTRIBUTING.md
	new file:   src/yolov5_ros/src/yolov5/LICENSE
	new file:   src/yolov5_ros/src/yolov5/README.md
	new file:   src/yolov5_ros/src/yolov5/best.pt
	new file:   src/yolov5_ros/src/yolov5/classify/predict.py
	new file:   src/yolov5_ros/src/yolov5/classify/train.py
	new file:   src/yolov5_ros/src/yolov5/classify/val.py
	new file:   src/yolov5_ros/src/yolov5/data/Argoverse.yaml
	new file:   src/yolov5_ros/src/yolov5/data/GlobalWheat2020.yaml
	new file:   src/yolov5_ros/src/yolov5/data/ImageNet.yaml
	new file:   src/yolov5_ros/src/yolov5/data/Objects365.yaml
	new file:   src/yolov5_ros/src/yolov5/data/SKU-110K.yaml
	new file:   src/yolov5_ros/src/yolov5/data/VOC.yaml
	new file:   src/yolov5_ros/src/yolov5/data/VisDrone.yaml
	new file:   src/yolov5_ros/src/yolov5/data/coco.yaml
	new file:   src/yolov5_ros/src/yolov5/data/coco128.yaml
	new file:   src/yolov5_ros/src/yolov5/data/hyps/hyp.Objects365.yaml
	new file:   src/yolov5_ros/src/yolov5/data/hyps/hyp.VOC.yaml
	new file:   src/yolov5_ros/src/yolov5/data/hyps/hyp.scratch-high.yaml
	new file:   src/yolov5_ros/src/yolov5/data/hyps/hyp.scratch-low.yaml
	new file:   src/yolov5_ros/src/yolov5/data/hyps/hyp.scratch-med.yaml
	new file:   src/yolov5_ros/src/yolov5/data/images/bus.jpg
	new file:   src/yolov5_ros/src/yolov5/data/images/zidane.jpg
	new file:   src/yolov5_ros/src/yolov5/data/mydata.yaml
	new file:   src/yolov5_ros/src/yolov5/data/scripts/download_weights.sh
	new file:   src/yolov5_ros/src/yolov5/data/scripts/get_coco.sh
	new file:   src/yolov5_ros/src/yolov5/data/scripts/get_coco128.sh
	new file:   src/yolov5_ros/src/yolov5/data/scripts/get_imagenet.sh
	new file:   src/yolov5_ros/src/yolov5/data/xView.yaml
	new file:   src/yolov5_ros/src/yolov5/detect.py
	new file:   src/yolov5_ros/src/yolov5/export.py
	new file:   src/yolov5_ros/src/yolov5/hubconf.py
	new file:   src/yolov5_ros/src/yolov5/models/__init__.py
	new file:   src/yolov5_ros/src/yolov5/models/common.py
	new file:   src/yolov5_ros/src/yolov5/models/experimental.py
	new file:   src/yolov5_ros/src/yolov5/models/hub/anchors.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov3-spp.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov3-tiny.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov3.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5-bifpn.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5-fpn.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5-p2.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5-p34.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5-p6.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5-p7.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5-panet.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5l6.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5m6.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5n6.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5s-ghost.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5s-transformer.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5s6.yaml
	new file:   src/yolov5_ros/src/yolov5/models/hub/yolov5x6.yaml
	new file:   src/yolov5_ros/src/yolov5/models/tf.py
	new file:   src/yolov5_ros/src/yolov5/models/yolo.py
	new file:   src/yolov5_ros/src/yolov5/models/yolov5l.yaml
	new file:   src/yolov5_ros/src/yolov5/models/yolov5m.yaml
	new file:   src/yolov5_ros/src/yolov5/models/yolov5n.yaml
	new file:   src/yolov5_ros/src/yolov5/models/yolov5s.yaml
	new file:   src/yolov5_ros/src/yolov5/models/yolov5x.yaml
	new file:   src/yolov5_ros/src/yolov5/requirements.txt
	new file:   src/yolov5_ros/src/yolov5/setup.cfg
	new file:   src/yolov5_ros/src/yolov5/train.py
	new file:   src/yolov5_ros/src/yolov5/tutorial.ipynb
	new file:   src/yolov5_ros/src/yolov5/utils/__init__.py
	new file:   src/yolov5_ros/src/yolov5/utils/activations.py
	new file:   src/yolov5_ros/src/yolov5/utils/augmentations.py
	new file:   src/yolov5_ros/src/yolov5/utils/autoanchor.py
	new file:   src/yolov5_ros/src/yolov5/utils/autobatch.py
	new file:   src/yolov5_ros/src/yolov5/utils/aws/__init__.py
	new file:   src/yolov5_ros/src/yolov5/utils/aws/mime.sh
	new file:   src/yolov5_ros/src/yolov5/utils/aws/resume.py
	new file:   src/yolov5_ros/src/yolov5/utils/aws/userdata.sh
	new file:   src/yolov5_ros/src/yolov5/utils/benchmarks.py
	new file:   src/yolov5_ros/src/yolov5/utils/callbacks.py
	new file:   src/yolov5_ros/src/yolov5/utils/dataloaders.py
	new file:   src/yolov5_ros/src/yolov5/utils/docker/Dockerfile
	new file:   src/yolov5_ros/src/yolov5/utils/docker/Dockerfile-arm64
	new file:   src/yolov5_ros/src/yolov5/utils/docker/Dockerfile-cpu
	new file:   src/yolov5_ros/src/yolov5/utils/downloads.py
	new file:   src/yolov5_ros/src/yolov5/utils/flask_rest_api/README.md
	new file:   src/yolov5_ros/src/yolov5/utils/flask_rest_api/example_request.py
	new file:   src/yolov5_ros/src/yolov5/utils/flask_rest_api/restapi.py
	new file:   src/yolov5_ros/src/yolov5/utils/general.py
	new file:   src/yolov5_ros/src/yolov5/utils/google_app_engine/Dockerfile
	new file:   src/yolov5_ros/src/yolov5/utils/google_app_engine/additional_requirements.txt
	new file:   src/yolov5_ros/src/yolov5/utils/google_app_engine/app.yaml
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/__init__.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/clearml/README.md
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/clearml/__init__.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/clearml/clearml_utils.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/clearml/hpo.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/comet/README.md
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/comet/__init__.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/comet/comet_utils.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/comet/hpo.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/wandb/README.md
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/wandb/__init__.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/wandb/log_dataset.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/wandb/sweep.py
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/wandb/sweep.yaml
	new file:   src/yolov5_ros/src/yolov5/utils/loggers/wandb/wandb_utils.py
	new file:   src/yolov5_ros/src/yolov5/utils/loss.py
	new file:   src/yolov5_ros/src/yolov5/utils/metrics.py
	new file:   src/yolov5_ros/src/yolov5/utils/plots.py
	new file:   src/yolov5_ros/src/yolov5/utils/torch_utils.py
	new file:   src/yolov5_ros/src/yolov5/val.py
2023-05-09 01:18:40 +08:00
wxchen
a00f111d7f rebase 2023-05-09 01:18:10 +08:00
10 changed files with 269 additions and 257 deletions

1
.gitignore vendored
View File

@@ -1,5 +1,6 @@
.catkin_tools
.vscode
.vscode/
.vscode/*
/build
/devel

View File

@@ -1,20 +0,0 @@
{
"configurations": [
{
"browse": {
"databaseFilename": "${default}",
"limitSymbolsToIncludedHeaders": false
},
"includePath": [
"/opt/ros/melodic/include/**",
"/usr/include/**"
],
"name": "ROS",
"intelliSenseMode": "gcc-x64",
"compilerPath": "/usr/bin/gcc",
"cStandard": "gnu11",
"cppStandard": "c++14"
}
],
"version": 4
}

View File

@@ -1,8 +0,0 @@
{
"python.autoComplete.extraPaths": [
"/opt/ros/melodic/lib/python2.7/dist-packages"
],
"python.analysis.extraPaths": [
"/opt/ros/melodic/lib/python2.7/dist-packages"
]
}

18
.vscode/tasks.json vendored
View File

@@ -1,18 +0,0 @@
{
"tasks": [
{
"type": "shell",
"command": "catkin",
"args": [
"build",
// "-DPYTHON_EXECUTABLE=/home/da/miniconda3/envs/gsmini/bin/python",
"-DPYTHON_EXECUTABLE=${HOME}/.conda/envs/gsmini/bin/python"
],
"problemMatcher": [
"$catkin-gcc"
],
"group": "build",
"label": "catkin: build"
}
]
}

136
src/maintain/scripts/maintain.py Executable file
View File

@@ -0,0 +1,136 @@
#! /home/wxchen/.conda/envs/gsmini/bin/python
import rospy
import numpy as np
import open3d as o3d
from sensor_msgs.msg import Image , CameraInfo, PointCloud2
from detection_msgs.msg import BoundingBox, BoundingBoxes
import sensor_msgs.point_cloud2 as pc2
import cv_bridge
from cv_bridge import CvBridge, CvBridgeError
import cv2
import tf2_ros
import tf
from geometry_msgs.msg import PoseStamped, TransformStamped
# Shared converter for ROS Image messages <-> OpenCV arrays.
bridge = CvBridge()
# Most recent CameraInfo message (camera intrinsics); set by camera_info_callback.
color_intrinsics = None
# Most recent point iterator from the PointCloud2; set by point_cloud_callback.
cloud = None
# Most recent detection box as [xmin, ymin, xmax, ymax]; set by bounding_boxes_callback.
box = None
# Margin (pixels) added on every side of a detected bounding box.
d_width = 100
def camera_info_callback(msg):
    """Cache the latest CameraInfo message in the module-level global."""
    global color_intrinsics
    color_intrinsics = msg
def depth_image_callback(msg):
    """Convert the incoming depth Image to a 16-bit OpenCV array and cache it
    in the module-level `depth_image` global."""
    global depth_image
    depth_image = bridge.imgmsg_to_cv2(msg, '16UC1')
def point_cloud_callback(msg):
    """Cache an iterator over the finite (x, y, z) points of the PointCloud2.

    NOTE(review): pc2.read_points returns a one-shot generator; consumers
    must iterate it at most once per message -- confirm with main loop.
    """
    global cloud
    cloud = pc2.read_points(msg, field_names=("x", "y", "z"), skip_nans=True)
def bounding_boxes_callback(msg):
    """Cache the last detection box, expanded by d_width pixels on every side.

    NOTE(review): only the final box in the message survives the loop --
    presumably a single detection is expected; confirm against the detector.
    """
    global box
    for bb in msg.bounding_boxes:
        box = [bb.xmin - d_width, bb.ymin - d_width,
               bb.xmax + d_width, bb.ymax + d_width]
def main():
    """Fit a plane to the cloud points inside the detected box and broadcast
    the resulting "screw" pose as a TF frame.

    Subscribes to camera intrinsics, the aligned depth image, the depth point
    cloud and YOLOv5 detections; at 10 Hz back-projects the box centre to 3D,
    RANSAC-fits a plane to the points inside the box, and publishes a
    transform whose rotation takes the Z axis onto the plane normal.
    """
    rospy.init_node("plane_fitting_node")
    rospy.Subscriber("/camera/color/camera_info", CameraInfo, camera_info_callback)
    rospy.Subscriber("/camera/aligned_depth_to_color/image_raw", Image, depth_image_callback)
    rospy.Subscriber("/camera/depth/color/points", PointCloud2, point_cloud_callback)
    rospy.Subscriber("/yolov5/detections", BoundingBoxes, bounding_boxes_callback)
    tf_broadcaster = tf2_ros.TransformBroadcaster()
    plane_pub = rospy.Publisher("/plane_pose", PoseStamped, queue_size=10)
    rate = rospy.Rate(10)  # 10 Hz
    while not rospy.is_shutdown():
        # BUGFIX: also require a depth frame. The original dereferenced the
        # `depth_image` global without checking it, raising NameError until
        # the first depth message arrived.
        if (color_intrinsics is not None and cloud is not None
                and box is not None and globals().get('depth_image') is not None):
            # Camera intrinsics: focal lengths and principal point from K.
            fx, fy = color_intrinsics.K[0], color_intrinsics.K[4]
            cx, cy = color_intrinsics.K[2], color_intrinsics.K[5]
            points = []
            # Back-project the box centre pixel to 3D via the depth map (mm -> m).
            center_x = (box[0] + box[2]) / 2
            center_y = (box[1] + box[3]) / 2
            depth_array = np.array(depth_image, dtype=np.float32)
            pz = depth_array[int(center_y), int(center_x)] / 1000.0
            px = (center_x - cx) * pz / fx
            py = (center_y - cy) * pz / fy
            rospy.loginfo("Center point: {}".format([px, py, pz]))
            # Collect every cloud point projecting inside the (expanded) box.
            # NOTE(review): `cloud` is a one-shot generator; it is consumed
            # here and refreshed by the next point_cloud_callback.
            screw_point = None
            for x, y, z in cloud:
                if z != 0:
                    u = int(np.round((x * fx) / z + cx))
                    v = int(np.round((y * fy) / z + cy))
                    if u == center_x and v == center_y:
                        screw_point = [x, y, z]
                    if box[0] <= u <= box[2] and box[1] <= v <= box[3]:
                        points.append([x, y, z])
            points = np.array(points)
            # BUGFIX: segment_plane needs at least ransac_n (=3) points.
            if px != 0 and py != 0 and pz != 0 and len(points) >= 3:
                # Fit a plane to the points with RANSAC.
                pcd = o3d.geometry.PointCloud()
                pcd.points = o3d.utility.Vector3dVector(points)
                plane_model, inliers = pcd.segment_plane(
                    distance_threshold=0.02, ransac_n=3, num_iterations=100)
                [a, b, c, d] = plane_model
                # Rotation between the plane normal and the Z axis,
                # as axis-angle converted to a quaternion (x, y, z, w).
                normal = np.array([a, b, c])
                z_axis = np.array([0, 0, 1])
                cos_theta = np.dot(normal, z_axis) / (np.linalg.norm(normal) * np.linalg.norm(z_axis))
                # clip guards arccos against rounding just outside [-1, 1]
                theta = np.arccos(np.clip(cos_theta, -1.0, 1.0))
                rotation_axis = np.cross(z_axis, normal)
                axis_norm = np.linalg.norm(rotation_axis)
                if axis_norm < 1e-9:
                    # BUGFIX: a normal (anti)parallel to Z gave a zero cross
                    # product and a division by zero. Parallel -> identity;
                    # antiparallel -> 180 degrees about the X axis.
                    if cos_theta > 0:
                        quaternion = np.array([0.0, 0.0, 0.0, 1.0])
                    else:
                        quaternion = np.array([1.0, 0.0, 0.0, 0.0])
                else:
                    rotation_axis = rotation_axis / axis_norm
                    quaternion = np.hstack((rotation_axis * np.sin(theta / 2), [np.cos(theta / 2)]))
                # Broadcast the screw pose relative to the colour optical frame.
                screw_tf = TransformStamped()
                screw_tf.header.stamp = rospy.Time.now()
                screw_tf.header.frame_id = "camera_color_optical_frame"
                screw_tf.child_frame_id = "screw"
                screw_tf.transform.translation.x = px
                screw_tf.transform.translation.y = py
                screw_tf.transform.translation.z = pz
                screw_tf.transform.rotation.x = quaternion[0]
                screw_tf.transform.rotation.y = quaternion[1]
                screw_tf.transform.rotation.z = quaternion[2]
                screw_tf.transform.rotation.w = quaternion[3]
                tf_broadcaster.sendTransform(screw_tf)
        rate.sleep()
if __name__ == "__main__":
    # Run the node; ROSInterruptException on shutdown is the normal exit path.
    try:
        main()
    except rospy.ROSInterruptException:
        pass

View File

@@ -1,93 +0,0 @@
#! /home/wxchen/.conda/envs/gsmini/bin/python
import numpy as np
import cv2 as cv
from matplotlib import pyplot as plt
import rospy
from sensor_msgs.msg import Image
import message_filters
from cv_bridge import CvBridge, CvBridgeError
import rospkg
# Minimum number of Lowe-ratio-filtered SIFT matches required for a homography.
MIN_MATCH_COUNT = 10
# Absolute path of the 'maintain' ROS package; the template image lives there.
pkg_path = rospkg.RosPack().get_path('maintain')
rospy.loginfo(pkg_path)
# Grayscale template image matched against every incoming frame.
img_template = cv.imread(pkg_path + '/scripts/tt.png',0)
def callback(rgb, depth):
    """Handle a time-synchronized RGB/depth pair: run template matching on
    the colour frame and display the matched region for one second."""
    rospy.loginfo("callback")
    bridge = CvBridge()
    # rospy.loginfo(rgb.header.stamp)
    # rospy.loginfo(depth.header.stamp)
    try:
        color_frame = bridge.imgmsg_to_cv2(rgb, 'bgr8')
        # Depth frame is converted (and may raise) but is otherwise unused here.
        depth_frame = bridge.imgmsg_to_cv2(depth, '16UC1')
        matched_roi = matcher(color_frame)
        cv.imshow("img_matcher", matched_roi)
        cv.waitKey(1000)
    except CvBridgeError as e:
        print(e)
def matcher(img):
    """Locate the module-level template image inside `img`.

    Uses SIFT keypoints, FLANN matching with Lowe's ratio test, and a RANSAC
    homography to project the template's corners into the frame.

    Returns the cropped region of `img` covering the matched template, or
    None when there are too few good matches (or any OpenCV step fails).
    """
    try:
        # BUGFIX: `roi` was unbound on the too-few-matches path, so
        # `return roi` raised UnboundLocalError (swallowed by the except).
        roi = None
        # Detect SIFT keypoints/descriptors in both template and frame.
        sift = cv.SIFT_create()
        kp1, des1 = sift.detectAndCompute(img_template,None)
        kp2, des2 = sift.detectAndCompute(img,None)
        # FLANN KD-tree matcher, two nearest neighbours per descriptor.
        FLANN_INDEX_KDTREE = 1
        index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
        search_params = dict(checks = 50)
        flann = cv.FlannBasedMatcher(index_params, search_params)
        matches = flann.knnMatch(des1,des2,k=2)
        # Keep only matches passing Lowe's ratio test.
        good = [m for m, n in matches if m.distance < 0.7 * n.distance]
        if len(good)>MIN_MATCH_COUNT:
            src_pts = np.float32([ kp1[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
            dst_pts = np.float32([ kp2[m.trainIdx].pt for m in good ]).reshape(-1,1,2)
            M, mask = cv.findHomography(src_pts, dst_pts, cv.RANSAC,5.0)
            matchesMask = mask.ravel().tolist()
            # Project the template's corners into the frame and crop that box.
            h,w = img_template.shape
            pts = np.float32([ [0,0],[0,h-1],[w-1,h-1],[w-1,0] ]).reshape(-1,1,2)
            dst = cv.perspectiveTransform(pts,M)
            corners = np.int32(dst)
            roi = img[corners[0][0][1]:corners[2][0][1], corners[0][0][0]:corners[2][0][0]]
            # roi = detect_black(roi)
            # img2 = cv.polylines(img2,[np.int32(dst)],True,255,3, cv.LINE_AA)
        else:
            print( "Not enough matches are found - {}/{}".format(len(good), MIN_MATCH_COUNT) )
        return roi
    except Exception as e:
        # NOTE(review): broad catch keeps the node alive on any OpenCV failure
        # and falls through returning None implicitly; consider narrowing.
        print(e)
if __name__ == "__main__":
    rospy.init_node("maintain")
    rospy.loginfo("maintain task start ......")
    # Pair colour and depth frames by exact timestamp before invoking callback.
    rgb_sub = message_filters.Subscriber("/camera/color/image_raw", Image)
    depth_sub = message_filters.Subscriber("/camera/aligned_depth_to_color/image_raw", Image)
    ts = message_filters.TimeSynchronizer([rgb_sub, depth_sub], 1)
    ts.registerCallback(callback)
    rospy.spin()

View File

@@ -6,20 +6,72 @@ from matplotlib import pyplot as plt
import rospy
import tf2_ros
import tf
from sensor_msgs.msg import Image , CameraInfo
from sensor_msgs.msg import Image , CameraInfo, PointCloud2
from geometry_msgs.msg import PoseStamped, TransformStamped, Quaternion
import message_filters
from cv_bridge import CvBridge, CvBridgeError
import rospkg
# import open3d as o3d
# from open3d_ros_helper import open3d_ros_helper as orh
import os
import sys
import math
from rostopic import get_topic_type
from detection_msgs.msg import BoundingBox, BoundingBoxes
bridge = CvBridge()
annulus_width = 10
# 2d to 3d
def computer_2d_3d(x, y, depth_roi, color_intrinsics):
    """Back-project pixel (x, y) to a 3D point in the camera frame.

    ``depth_roi`` holds depth values in millimetres; ``color_intrinsics`` is
    the flattened 3x3 camera matrix K, so fx = K[0], fy = K[4], cx = K[2],
    cy = K[5]. Returns (px, py, pz) in metres.
    """
    fx, fy = color_intrinsics[0], color_intrinsics[4]
    cx, cy = color_intrinsics[2], color_intrinsics[5]
    # Depth image is in mm; convert to metres.
    pz = depth_roi[int(y), int(x)] / 1000.0
    return (x - cx) * pz / fx, (y - cy) * pz / fy, pz
def compute_plane_normal(box, depth, color_intrinsics):
    """Estimate the plane normal of the region bounded by ``box`` and return
    the rotation from the camera Z axis onto that normal as a quaternion.

    Args:
        box: rectangle interpreted here as [x1, y1, x2, y2] in pixels.
            NOTE(review): the caller appears to build it as
            [ymin, xmin, ymax, xmax] — confirm the intended order.
        depth: 2D depth image indexed as depth[row, col] (same units
            throughout; no conversion is applied here).
        color_intrinsics: flattened 3x3 camera matrix K.

    Returns:
        Quaternion [qx, qy, qz, qw]. Falls back to the identity rotation
        (or a 180-degree X-axis flip) when the geometry is degenerate.
    """
    # Camera intrinsics from the flattened K matrix.
    fx = color_intrinsics[0]
    fy = color_intrinsics[4]
    cx = color_intrinsics[2]
    cy = color_intrinsics[5]
    x1, y1, x2, y2 = box[0], box[1], box[2], box[3]
    # Depth at the rectangle centre, used to back-project the corner x/y
    # coordinates (planar approximation, as in the original).
    z = depth[int((y1 + y2) / 2), int((x1 + x2) / 2)]
    # Back-project the four corner pixels at the centre depth.
    px1, py1 = (x1 - cx) * z / fx, (y1 - cy) * z / fy
    px2, py2 = (x2 - cx) * z / fx, (y1 - cy) * z / fy
    px3, py3 = (x2 - cx) * z / fx, (y2 - cy) * z / fy
    px4, py4 = (x1 - cx) * z / fx, (y2 - cy) * z / fy
    # Depth sampled at each corner (depth[row, col]).
    z1 = depth[int(y1), int(x1)]
    z2 = depth[int(y1), int(x2)]
    z3 = depth[int(y2), int(x2)]
    z4 = depth[int(y2), int(x1)]
    # Edge vectors of the quad. Fix: the z components are now consistent
    # corner-to-corner differences (the original mixed corner-minus-centre
    # depths with corner-to-corner x/y differences).
    v1 = np.array([px2 - px1, py2 - py1, z2 - z1], dtype=float)
    v2 = np.array([px3 - px2, py3 - py2, z3 - z2], dtype=float)
    v3 = np.array([px4 - px3, py4 - py3, z4 - z3], dtype=float)
    v4 = np.array([px1 - px4, py1 - py4, z1 - z4], dtype=float)
    # Average normal over the four corner cross products.
    normal = np.cross(v1, v2) + np.cross(v2, v3) + np.cross(v3, v4) + np.cross(v4, v1)
    norm = np.linalg.norm(normal)
    if norm < 1e-12:
        # Degenerate geometry (e.g. zero-depth corners): no usable normal.
        return [0.0, 0.0, 0.0, 1.0]
    normal = normal / norm
    # Rotation from the camera Z axis onto the normal.
    ref_vector = np.array([0.0, 0.0, 1.0])
    # Clamp to [-1, 1]: float round-off can push the dot product out of
    # acos's domain.
    cos_angle = float(np.clip(np.dot(ref_vector, normal), -1.0, 1.0))
    angle = math.acos(cos_angle)
    axis = np.cross(ref_vector, normal)
    axis_norm = np.linalg.norm(axis)
    if axis_norm < 1e-12:
        # Fix: the normal is (anti)parallel to Z — the original divided by
        # zero here and produced a NaN quaternion. Return identity, or a
        # 180-degree rotation about X for the antiparallel case.
        return [0.0, 0.0, 0.0, 1.0] if cos_angle > 0.0 else [1.0, 0.0, 0.0, 0.0]
    axis = axis / axis_norm
    # Convert (angle, axis) to a quaternion in [x, y, z, w] order.
    qx, qy, qz, qw = tf.transformations.quaternion_about_axis(angle, axis)
    return [qx, qy, qz, qw]
def calculate_image_edge_plane_normal(depth_roi):
# Get the shape of the depth_roi
height, width = depth_roi.shape
@@ -73,6 +125,24 @@ def calculate_image_edge_plane_normal(depth_roi):
return normal
# def compute_normal_vector(p1, p2, p3, p4):
# # Compute two vectors in the plane
# v1 = np.array(p2) - np.array(p1)
# v2 = np.array(p3) - np.array(p1)
# # Compute the cross product of the two vectors to get the normal vector
# n = np.cross(v1, v2)
# # Compute the fourth point in the plane
# p4 = np.array(p4)
# # Check if the fourth point is on the same side of the plane as the origin
# if np.dot(n, p4 - np.array(p1)) < 0:
# n = -n
# # Normalize the normal vector to obtain a unit vector
# n = n / np.linalg.norm(n)
# theta = math.acos(n[2])
# sin_theta_2 = math.sin(theta/2)
# quaternion = [math.cos(theta/2), sin_theta_2 * n[0], sin_theta_2 * n[1], sin_theta_2 * n[2]]
# return quaternion
def filter_quaternion(quat, quat_prev, alpha):
if quat_prev is None:
quat_prev = quat
@@ -82,79 +152,77 @@ def filter_quaternion(quat, quat_prev, alpha):
quat_filtered[i] = alpha * quat[i] + (1-alpha) * quat_prev[i]
# Normalize the quaternion
quat_filtered = quat_filtered / np.linalg.norm(quat_filtered)
return quat_filtered
return quat_filtered
def box_callback(box, depth, color_info):
try:
color_intrinsics = color_info.K
depth_image = bridge.imgmsg_to_cv2(depth, '16UC1')
# get the center of screw
boundingBox = box.bounding_boxes[0]
screw_x = (boundingBox.xmax + boundingBox.xmin) / 2
screw_y = (boundingBox.ymax + boundingBox.ymin) / 2
# print(screw_x,screw_y)
# pc = orh.rospc_to_o3dpc(pc_msg)
depth_array = np.array(depth_image, dtype=np.float32)
depth_roi = depth_array[boundingBox.ymin:boundingBox.ymax, boundingBox.xmin:boundingBox.xmax]
if box is not None and len(box.bounding_boxes) > 0:
# get the center of screw
boundingBox = box.bounding_boxes[0]
screw_x = (boundingBox.xmax + boundingBox.xmin) / 2
screw_y = (boundingBox.ymax + boundingBox.ymin) / 2
# print(screw_x,screw_y)
z = np.mean(depth_roi) * 0.001
x = (screw_x - color_intrinsics[2]) * z / color_intrinsics[0]
y = (screw_y - color_intrinsics[5]) * z / color_intrinsics[4]
# rospy.loginfo("screw pose: x: %f, y: %f, z: %f", x, y, z)
# calculate normal direction of screw area
annulus_roi = depth_array[boundingBox.ymin-annulus_width:boundingBox.ymax+annulus_width, boundingBox.xmin-annulus_width:boundingBox.xmax+annulus_width]
normal = calculate_image_edge_plane_normal(annulus_roi)
# print(normal)
depth_array = np.array(depth_image, dtype=np.float32)
# depth_roi = depth_array[boundingBox.ymin:boundingBox.ymax, boundingBox.xmin:boundingBox.xmax]
# publish screw pose
# screw_pose = PoseStamped()
# screw_pose.header.stamp = rospy.Time.now()
# screw_pose.header.frame_id = "camera_color_optical_frame"
# screw_pose.pose.position.x = x
# screw_pose.pose.position.y = y
# screw_pose.pose.position.z = z
# screw_pose.pose.orientation.x = 0
# screw_pose.pose.orientation.y = 0
# screw_pose.pose.orientation.z = 0
# screw_pose.pose.orientation.w = 1
x, y, z = computer_2d_3d(screw_x, screw_y, depth_array, color_intrinsics)
# rospy.loginfo("screw pose: x: %f, y: %f, z: %f", x, y, z)
# calculate normal direction of screw area
box = [boundingBox.ymin - annulus_width, boundingBox.xmin - annulus_width, boundingBox.ymax + annulus_width, boundingBox.xmax + annulus_width]
# p1x, p1y, p1z = computer_2d_3d(boundingBox.xmin-annulus_width, boundingBox.ymin-annulus_width, depth_array, color_intrinsics)
# p2x, p2y, p2z = computer_2d_3d(boundingBox.xmax+annulus_width, boundingBox.ymin-annulus_width, depth_array, color_intrinsics)
# p3x, p3y, p3z = computer_2d_3d(boundingBox.xmax+annulus_width, boundingBox.ymax+annulus_width, depth_array, color_intrinsics)
# p4x, p4y, p4z = computer_2d_3d(boundingBox.xmin-annulus_width, boundingBox.ymax+annulus_width, depth_array, color_intrinsics)
# p1 = [p1x, p1y, p1z]
# p2 = [p2x, p2y, p2z]
# p3 = [p3x, p3y, p3z]
# p4 = [p4x, p4y, p4z]
# normal_q = compute_normal_vector(p1, p2, p3, p4)
normal_q = compute_plane_normal(box, depth_array, color_intrinsics)
# pose_pub.publish(screw_pose)
# normal vector to quaternion
screw_quat = tf.transformations.quaternion_from_euler(0, 0, 0)
screw_quat[0] = normal[0]
screw_quat[1] = normal[1]
screw_quat[2] = normal[2]
screw_quat[3] = 0
# quaternion to euler
screw_euler = tf.transformations.euler_from_quaternion(screw_quat)
screw_quat = tf.transformations.quaternion_from_euler(screw_euler[0], screw_euler[1], 0)
# annulus_roi = depth_array[boundingBox.ymin-annulus_width:boundingBox.ymax+annulus_width, boundingBox.xmin-annulus_width:boundingBox.xmax+annulus_width]
# normal = calculate_image_edge_plane_normal(annulus_roi)
# print(normal)
# Apply low-pass filter to screw quaternion
alpha = 0.4
global screw_quat_prev
screw_quat_filtered = filter_quaternion(screw_quat, screw_quat_prev, alpha)
screw_quat_prev = screw_quat_filtered
# normal vector to quaternion
screw_quat = tf.transformations.quaternion_from_euler(0, 0, 0)
screw_quat[0] = normal_q[0]
screw_quat[1] = normal_q[1]
screw_quat[2] = normal_q[2]
screw_quat[3] = normal_q[3]
# quaternion to euler
screw_euler = tf.transformations.euler_from_quaternion(screw_quat)
screw_quat_zero_z = tf.transformations.quaternion_from_euler(screw_euler[0], screw_euler[1], 0)
# publish screw tf
screw_tf = TransformStamped()
screw_tf.header.stamp = rospy.Time.now()
screw_tf.header.frame_id = "camera_color_optical_frame"
screw_tf.child_frame_id = "screw"
screw_tf.transform.translation.x = x
screw_tf.transform.translation.y = y
screw_tf.transform.translation.z = z
screw_tf.transform.rotation.x = screw_quat_filtered[0]
screw_tf.transform.rotation.y = screw_quat_filtered[1]
screw_tf.transform.rotation.z = screw_quat_filtered[2]
screw_tf.transform.rotation.w = screw_quat_filtered[3]
# Apply low-pass filter to screw quaternion
alpha = 0.4
global screw_quat_prev
screw_quat_filtered = filter_quaternion(screw_quat, screw_quat_prev, alpha)
screw_quat_prev = screw_quat_filtered
tf_broadcaster.sendTransform(screw_tf)
# publish screw tf
screw_tf = TransformStamped()
screw_tf.header.stamp = rospy.Time.now()
screw_tf.header.frame_id = "camera_color_optical_frame"
screw_tf.child_frame_id = "screw"
screw_tf.transform.translation.x = x
screw_tf.transform.translation.y = y
screw_tf.transform.translation.z = z
screw_tf.transform.rotation.x = screw_quat_filtered[0]
screw_tf.transform.rotation.y = screw_quat_filtered[1]
screw_tf.transform.rotation.z = screw_quat_filtered[2]
screw_tf.transform.rotation.w = screw_quat_filtered[3]
tf_broadcaster.sendTransform(screw_tf)
except Exception as e:
@@ -172,6 +240,7 @@ if __name__ == "__main__":
box_sub = message_filters.Subscriber("/yolov5/detections", BoundingBoxes)
depth_sub = message_filters.Subscriber("/camera/aligned_depth_to_color/image_raw", Image)
color_info = message_filters.Subscriber("/camera/color/camera_info", CameraInfo)
# pc_sub = message_filters.Subscriber("/camera/depth/color/points", PointCloud2)
tf_broadcaster = tf2_ros.TransformBroadcaster()

View File

@@ -50,7 +50,8 @@
<param name="publish_image" value="$(arg publish_image)"/>
<param name="output_image_topic" value="$(arg output_image_topic)"/>
</node>
<!-- <include file="$(find camera_launch)/launch/d435.launch"/> -->
<include file="$(find realsense2_camera)/launch/my_camera.launch" >
</include>
</launch>

View File

@@ -1,56 +0,0 @@
<launch>
<!-- YOLOv5 ROS bringup: configures the detector parameters, starts the
     detect.py node, and includes the D435 camera driver launch file. -->
<!-- Detection configuration -->
<arg name="weights" default="$(find yolov5_ros)/best.pt"/>
<arg name="data" default="$(find yolov5_ros)/src/yolov5/data/coco128.yaml"/>
<arg name="confidence_threshold" default="0.5"/>
<arg name="iou_threshold" default="0.45"/>
<arg name="maximum_detections" default="1000"/>
<arg name="device" default="0"/>
<arg name="agnostic_nms" default="true"/>
<arg name="line_thickness" default="3"/>
<arg name="dnn" default="true"/>
<arg name="half" default="false"/>
<!-- replace imgsz -->
<arg name="inference_size_h" default="640"/>
<arg name="inference_size_w" default="640"/>
<!-- Visualize using OpenCV window -->
<arg name="view_image" default="true"/>
<!-- ROS topics -->
<arg name="input_image_topic" default="/clover0/main_camera/image_raw"/>
<arg name="output_topic" default="/yolov5/detections"/>
<!-- Optional topic (publishing annotated image) -->
<arg name="publish_image" default="false"/>
<arg name="output_image_topic" default="/yolov5/image_out"/>
<!-- Detector node: forwards every arg above as a private parameter. -->
<node pkg="yolov5_ros" name="detect" type="detect.py" output="screen">
<param name="weights" value="$(arg weights)"/>
<param name="data" value="$(arg data)"/>
<param name="confidence_threshold" value="$(arg confidence_threshold)"/>
<param name="iou_threshold" value="$(arg iou_threshold)" />
<param name="maximum_detections" value="$(arg maximum_detections)"/>
<param name="device" value="$(arg device)" />
<param name="agnostic_nms" value="$(arg agnostic_nms)" />
<param name="line_thickness" value="$(arg line_thickness)"/>
<param name="dnn" value="$(arg dnn)"/>
<param name="half" value="$(arg half)"/>
<param name="inference_size_h" value="$(arg inference_size_h)"/>
<param name="inference_size_w" value="$(arg inference_size_w)"/>
<param name="input_image_topic" value="$(arg input_image_topic)"/>
<param name="output_topic" value="$(arg output_topic)"/>
<param name="view_image" value="$(arg view_image)"/>
<param name="publish_image" value="$(arg publish_image)"/>
<param name="output_image_topic" value="$(arg output_image_topic)"/>
</node>
<!-- Camera driver for the RealSense D435. -->
<include file="$(find camera_launch)/launch/d435.launch"/>
</launch>