diff --git a/.gitmodules b/.gitmodules index 5c08dfac..a1ef73b0 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,3 +7,6 @@ [submodule "src/external_pkgs/Catch2"] path = src/external_pkgs/Catch2 url = https://github.com/catchorg/Catch2.git +[submodule "src/arm_hardware_interface/moteus"] + path = src/arm_hardware_interface/moteus + url = https://github.com/mjbots/moteus.git diff --git a/bestHammer.pt b/bestHammer.pt new file mode 100644 index 00000000..8a18ca7d Binary files /dev/null and b/bestHammer.pt differ diff --git a/src/arm_hardware_interface/CMakeLists.txt b/src/arm_hardware_interface/CMakeLists.txt index 874ae538..a5c32d5d 100644 --- a/src/arm_hardware_interface/CMakeLists.txt +++ b/src/arm_hardware_interface/CMakeLists.txt @@ -20,11 +20,22 @@ include_directories( ${arm_control_INCLUDE_DIRS} ) - +add_subdirectory(moteus) add_library(${PROJECT_NAME} src/armprotocol.cpp include/arm_hardware_interface/ArmSerialProtocol.h) -add_executable(arm_serial_driver src/ArmSerialInterface.cpp include/ArmSerialInterface.h include/arm_hardware_interface/ArmSerialProtocol.h) -ament_target_dependencies(arm_serial_driver rclcpp sensor_msgs serial rover_msgs arm_control) + +# For old and new arm +add_executable(serial_arm_driver src/ArmSerialInterface.cpp include/ArmSerialInterface.h include/arm_hardware_interface/ArmSerialProtocol.h) +add_executable(moteus_arm_driver src/MoteusInterface.cpp include/ArmSerialInterface.h include/arm_hardware_interface/ArmSerialProtocol.h) + +target_compile_definitions(moteus_arm_driver + PRIVATE + BUILDING_NEW_DRIVER=1 +) + +target_link_libraries(moteus_arm_driver moteus::cpp) +ament_target_dependencies(serial_arm_driver rclcpp sensor_msgs serial rover_msgs arm_control) +ament_target_dependencies(moteus_arm_driver rclcpp sensor_msgs serial rover_msgs arm_control) if(BUILD_TESTING) find_package(ament_lint_auto REQUIRED) @@ -49,7 +60,8 @@ install( ) install(TARGETS - arm_serial_driver + serial_arm_driver + moteus_arm_driver ${PROJECT_NAME} 
DESTINATION lib/${PROJECT_NAME} ARCHIVE DESTINATION lib diff --git a/src/arm_hardware_interface/include/ArmSerialInterface.h b/src/arm_hardware_interface/include/ArmSerialInterface.h index 8f351c86..de1a9ed7 100644 --- a/src/arm_hardware_interface/include/ArmSerialInterface.h +++ b/src/arm_hardware_interface/include/ArmSerialInterface.h @@ -1,14 +1,18 @@ -#include -#include "sensor_msgs/msg/joy.hpp" -#include "sensor_msgs/msg/joint_state.hpp" #include "arm_control/include/armControlParams.h" +#include "sensor_msgs/msg/joint_state.hpp" +#include "sensor_msgs/msg/joy.hpp" +#include #include "rover_msgs/msg/arm_command.hpp" -#include #include +#include -#include +#ifdef BUILDING_NEW_DRIVER +#include "moteus.h" +using namespace mjbots; +#endif +#include #include @@ -21,7 +25,7 @@ #define AXIS_1_DIR 1 #define AXIS_2_DIR 1 #define AXIS_3_DIR 1 -#define AXIS_4_DIR 1 +#define AXIS_4_DIR 1 #define AXIS_5_DIR 1 #define AXIS_6_DIR 1 @@ -29,77 +33,79 @@ using std::string; class ArmSerial : public rclcpp::Node { public: - ArmSerial(); + ArmSerial(); - // float firm + // float firm - rover_msgs::msg::ArmCommand current_arm_status; - - // angle_echo.positions.resize(NUM_JOINTS); - // void start_rx() { - // serialRxThread = std::thread(&ArmSerial::serial_rx(), this); - // } - // string joint_names[6] = {"joint_turntable", "joint_axis1", "joint_axis2", "joint_axis3", "joint_axis4", "joint_ender"}; //? old arm urdf - string joint_names[NUM_JOINTS + 2] = {"joint_1", "joint_2", "joint_3", "joint_4", "joint_5", "joint_6", "finger_left_joint", "finger_right_joint"}; //? newest (Sep 2024) arm urdf + rover_msgs::msg::ArmCommand current_arm_status; + // angle_echo.positions.resize(NUM_JOINTS); + // void start_rx() { + // serialRxThread = std::thread(&ArmSerial::serial_rx(), this); + // } + // string joint_names[6] = {"joint_turntable", "joint_axis1", "joint_axis2", "joint_axis3", "joint_axis4", "joint_ender"}; //? 
old arm + // urdf + string joint_names[NUM_JOINTS + 2] = {"joint_1", "joint_2", "joint_3", "joint_4", + "joint_5", "joint_6", "finger_left_joint", "finger_right_joint"}; //? newest (Sep 2024) arm urdf private: + unsigned long baud = 115200; // 19200; + string port = "/dev/serial/by-id/usb-ZEPHYR_UBC_ROVER_Arm_500100C6224069D7-if00"; + + serial::Serial teensy; + serial::Timeout timeout_uart = serial::Timeout::simpleTimeout(1000); // E.g., 1000 ms or 1 second + +#ifdef BUILDING_NEW_DRIVER + std::map> controllers; + std::map servo_data; + std::shared_ptr transport; +#endif + bool send_angles = true; + + struct Axis { + float curr_pos; + float target_pos; + float speed; + float zero_rad; + float max_rad; + int dir; + }; - unsigned long baud = 115200;//19200; - string port = "/dev/serial/by-id/usb-ZEPHYR_UBC_ROVER_Arm_500100C6224069D7-if00"; - - serial::Serial teensy; - serial::Timeout timeout_uart = serial::Timeout::simpleTimeout(1000); // E.g., 1000 ms or 1 second - - - struct Axis{ - float curr_pos; - float target_pos; - float speed; - float zero_rad; - float max_rad; - int dir; - }; - - Axis axes[NUM_JOINTS]; - // Axis axis_EE; - - float target_position[NUM_JOINTS]; - float target_velocities[NUM_JOINTS]; - - int homed = 0; - bool homing = false; - float EE = 0; - volatile float current_velocity[NUM_JOINTS] = {00.00, 00.00, 00.00, 00.00, 00.00, 00.00, 00.00}; - volatile float current_position[NUM_JOINTS] = {00.00, 00.00, 00.00, 00.00, 00.00, 00.00, 00.00}; - volatile int current_limit_switches[NUM_JOINTS] = {-1, -1, -1, -1, -1, -1, -1}; - - rclcpp::Subscription::SharedPtr command_subscriber; - void CommandCallback(const rover_msgs::msg::ArmCommand::SharedPtr msg); + Axis axes[NUM_JOINTS]; + // Axis axis_EE; - rclcpp::Publisher::SharedPtr joint_state_publisher_; + float target_position[NUM_JOINTS]; + float target_velocities[NUM_JOINTS]; - sensor_msgs::msg::JointState prev_joint_states; //for sim - rclcpp::Publisher::SharedPtr arm_position_publisher; + int homed = 0; + 
bool homing = false; + float EE = 0; + volatile float current_velocity[NUM_JOINTS] = {00.00, 00.00, 00.00, 00.00, 00.00, 00.00, 00.00}; + volatile float current_position[NUM_JOINTS] = {00.00, 00.00, 00.00, 00.00, 00.00, 00.00, 00.00}; + volatile int current_limit_switches[NUM_JOINTS] = {-1, -1, -1, -1, -1, -1, -1}; - rclcpp::TimerBase::SharedPtr timer_; + rclcpp::Subscription::SharedPtr command_subscriber; + void CommandCallback(const rover_msgs::msg::ArmCommand::SharedPtr msg); - std::thread serialRxThread; + rclcpp::Publisher::SharedPtr joint_state_publisher_; - void serial_rx(); //? should be static inline? - float degToRad(float deg); - float firmToMoveitOffsetPos(float deg, int axis); - float firmToMoveitOffsetVel(float deg, int axis); - void send_position_command(float pos[NUM_JOINTS]); - void send_velocity_command(float vel[NUM_JOINTS]); - void send_test_limits_command(); - void sendHomeCmd(int target_axis); - void sendCommCmd(int target_state); - void sendMsg(std::string outMsg); - void parseArmAngleUart(std::string msg); - void parseLimitSwitchTest(std::string msg); - }; - + sensor_msgs::msg::JointState prev_joint_states; // for sim + rclcpp::Publisher::SharedPtr arm_position_publisher; + rclcpp::TimerBase::SharedPtr timer_; + std::thread serialRxThread; + void serial_rx(); //? should be static inline? 
+ float degToRad(float deg); + float firmToMoveitOffsetPos(float deg, int axis); + float firmToMoveitOffsetVel(float deg, int axis); + void send_position_command(float pos[NUM_JOINTS]); + void send_velocity_command(float vel[NUM_JOINTS]); + void send_test_limits_command(); + void sendHomeCmd(int target_axis); + void sendCommCmd(int target_state); + void sendMsg(std::string outMsg); + void parseArmAngleUart(std::string msg); + void parseLimitSwitchTest(std::string msg); +}; diff --git a/src/arm_hardware_interface/moteus b/src/arm_hardware_interface/moteus new file mode 160000 index 00000000..84a952ab --- /dev/null +++ b/src/arm_hardware_interface/moteus @@ -0,0 +1 @@ +Subproject commit 84a952ab43b1bf56137978eba6a453e908bcb337 diff --git a/src/arm_hardware_interface/src/MoteusInterface.cpp b/src/arm_hardware_interface/src/MoteusInterface.cpp new file mode 100644 index 00000000..844fd177 --- /dev/null +++ b/src/arm_hardware_interface/src/MoteusInterface.cpp @@ -0,0 +1,472 @@ +// TODO: make a good comment +#include "ArmSerialInterface.h" + +#define PI 3.14159 + +// conf set servo.default_timeout_s nan +// By default, when commanded over +// CAN, there is a watchdog which requires commands to be sent at +// least every 100ms or the controller will enter a latched fault +// state, disable by using above cmd in tview + +ArmSerial::ArmSerial() : Node("ArmSerialDriver") { + //? new arm offsets. To change, check arm_control/include/armControlParams.h Should be synced with moveit params + + for (int i = 0; i < NUM_JOINTS; i++) { + axes[i].zero_rad = ArmConstants::axis_zero_rads[i]; + axes[i].dir = ArmConstants::axis_dirs[i]; +#ifdef PRINTOUT_AXIS_PARAMS + RCLCPP_INFO(this->get_logger(), "Axis %i /// DIR[ %i ] /// OFFSET TO URDF's ZERO_RAD[ %f ] ", i + 1, axes[i].dir, axes[i].zero_rad); +#endif + } + + // auto qos = rclcpp::QoS(rclcpp::KeepLast(1)).transient_local(); //Very hack way of only using "live" messages - iffy, and may still + // operate off of one stale message. 
in the future we should use a time stamped message, and check the stamp time against current time to + // make sure msg is not stale. + auto qos = rclcpp::QoS(rclcpp::KeepLast(1)).reliable().durability_volatile(); //? AHHHH WHAT THE FUCK IS A QOS + + arm_position_publisher = this->create_publisher("/arm/feedback", qos); + joint_state_publisher_ = this->create_publisher("/joint_states", qos); + double period = 1.0 / COMM_POLL_RATE; + + current_arm_status.positions.resize(NUM_JOINTS); + + command_subscriber = this->create_subscription( + "/arm/command", 10, std::bind(&ArmSerial::CommandCallback, this, std::placeholders::_1)); + + RCLCPP_INFO(this->get_logger(), "jointsff %d", NUM_JOINTS); + if (!SIMULATE) { + for (int i = 1; i <= NUM_JOINTS; i++) { + moteus::Controller::Options options; + options.id = i; + controllers[i] = std::make_shared(options); + } + + // Stop everything to clear faults. + for (const auto &pair : controllers) { + pair.second->SetStop(); + } + + // does send and recv every 10ms- same as arm firmware? 
+ period = 1.0 / 100.0; + transport = moteus::Controller::MakeSingletonTransport({}); + timer_ = this->create_wall_timer(std::chrono::duration(period), std::bind(&ArmSerial::serial_rx, this)); + } + + rclcpp::sleep_for(std::chrono::milliseconds(100)); + + int flag = 1; +} + +// Entry Point +int main(int argc, char *argv[]) { + rclcpp::init(argc, argv); + auto node = std::make_shared(); + + RCLCPP_INFO(node->get_logger(), "ArmSerial init"); + // std::thread arm_thread(ArmSerial::SerialRxThread, std::ref(node)); + + rclcpp::spin(node); + + rclcpp::shutdown(); + return 0; +} +// Callbacks + +void ArmSerial::CommandCallback(const rover_msgs::msg::ArmCommand::SharedPtr msg) { + char type = msg->cmd_type; + RCLCPP_INFO(this->get_logger(), "dfsf init %c", type); + switch (type) { + case HOME_CMD: + if (!SIMULATE) { + sendHomeCmd(msg->cmd_value); + } + break; + case COMM_CMD: + if (!SIMULATE) { + sendCommCmd(msg->cmd_value); + } + break; + case TEST_LIMITS_CMD: + if (!SIMULATE) { + send_test_limits_command(); + } + break; + case ABS_POS_CMD: + float target_positions[NUM_JOINTS]; + for (int i = 0; i < NUM_JOINTS; i++) { + target_positions[i] = msg->positions[i]; + } + if (SIMULATE) { + sensor_msgs::msg::JointState joint_states_; + joint_states_.position.resize(NUM_JOINTS); + joint_states_.velocity.resize(NUM_JOINTS); + joint_states_.name.resize(NUM_JOINTS); + for (int i = 0; i < NUM_JOINTS; i++) { + joint_states_.name[i] = joint_names[i]; + joint_states_.position[i] = firmToMoveitOffsetPos(target_positions[i], i); + joint_states_.velocity[i] = firmToMoveitOffsetVel(current_velocity[i], i); + } + joint_states_.header.stamp = rclcpp::Clock().now(); + + joint_state_publisher_->publish(joint_states_); + + } else { + send_position_command(target_positions); + } + break; + case ABS_VEL_CMD: + // double sim_target_velocities[NUM_JOINTS]; + for (int i = 0; i < NUM_JOINTS_NO_EE; i++) { + RCLCPP_INFO(this->get_logger(), "velovc %f", msg->velocities[i]); + target_velocities[i] = msg->velocities[i]; +#ifdef DEBUG_MSGS + RCLCPP_INFO(this->get_logger(), "J%i, %lf", i,
msg->velocities[i]); +#endif // DEBUG_MSGS + target_velocities[EE_INDEX] = msg->end_effector * EE_SPEED_SCALE; + // current_velocity[i] = msg->velocities[i]; + } + if (SIMULATE) { + sensor_msgs::msg::JointState joint_states_; + // auto curr_time = this->get_clock()->now(); + joint_states_.header.stamp = rclcpp::Clock().now(); + + joint_states_.velocity.resize(NUM_JOINTS); + joint_states_.position.resize(NUM_JOINTS); + joint_states_.name.resize(NUM_JOINTS); + for (int i = 0; i < NUM_JOINTS; i++) { + joint_states_.name[i] = joint_names[i]; + joint_states_.velocity[i] = firmToMoveitOffsetVel(target_velocities[i], i); + rclcpp::Time current_time(joint_states_.header.stamp); + rclcpp::Time prev_time(prev_joint_states.header.stamp); + joint_states_.position[i] = (prev_joint_states.position[i]) + (joint_states_.velocity[i] * (current_time - prev_time).seconds()); + prev_joint_states.position[i] = joint_states_.position[i]; + joint_states_.position[i] -= axes[i].zero_rad; + } + prev_joint_states.header.stamp = joint_states_.header.stamp; + joint_state_publisher_->publish(joint_states_); + + } else { + + send_velocity_command(target_velocities); //! + } + break; + default: + break; + } +} + +// Member functions + +float ArmSerial::degToRad(float deg) { + float rad = deg * 3.14159 / 180; // 14159265359 + + return (rad); +} + +float ArmSerial::firmToMoveitOffsetPos(float deg, int i) { + + float rad = degToRad(deg); + return ((rad * axes[i].dir) + (axes[i].zero_rad)); +} + +float ArmSerial::firmToMoveitOffsetVel(float deg, int i) { + + float rad = degToRad(deg); + + return ((rad * axes[i].dir)); +} + +void ArmSerial::parseLimitSwitchTest(std::string msg) { + // TODO: can remove, no need for limit switches i think, use pos feedback? 
+ // also can set moteus position limits using + // 2>servopos.position_min + // 2>servopos.position_max + + // int axis = 0; + // int value = 5; + // if (sscanf(msg.c_str(), "Limit Switch %d is %d.", &axis, &value) == 2) { + // if (axis >= 1 && axis <= 6) { + // if (value == 1 || value == 0) { + // this->current_limit_switches[axis - 1]; + // RCLCPP_INFO(this->get_logger(), "limit switches updated"); + // } + // } + // RCLCPP_ERROR(this->get_logger(), "Arm limit switch parsing failed"); + // } +} + +void ArmSerial::parseArmAngleUart(std::string msg) { + // TODO: need to recalculate + // ROS_INFO("Parsing Angle buffer: %s", msg.c_str()); + sensor_msgs::msg::JointState joint_states_; + joint_states_.position.resize(NUM_JOINTS_NO_EE + 2); + joint_states_.velocity.resize(NUM_JOINTS_NO_EE + 2); + joint_states_.name.resize(NUM_JOINTS_NO_EE + 2); + + if (sscanf(msg.c_str(), "$my_angleP(%f, %f, %f, %f, %f, %f, %f)\n", &axes[0].curr_pos, &axes[1].curr_pos, &axes[2].curr_pos, + &axes[3].curr_pos, &axes[4].curr_pos, &axes[5].curr_pos, &axes[6].curr_pos) == NUM_JOINTS) { + // All axes angles are in axes[i].des_angle_pos + // RCLCPP_INFO(this->get_logger(), "Absolute Angle Position Echo Accepted:"); + for (int i = 0; i < NUM_JOINTS_NO_EE; i++) { + current_arm_status.positions[i] = axes[i].curr_pos; + RCLCPP_INFO(this->get_logger(), "eeposse %d, %lf", i, current_arm_status.positions[i]); + joint_states_.name[i] = joint_names[i]; + joint_states_.position[i] = firmToMoveitOffsetPos(axes[i].curr_pos, i); + // joint_states_.velocity[i] = firmToMoveitOffsetVel(current_velocity[i], i); + joint_states_.velocity[i] = firmToMoveitOffsetVel(target_velocities[i], i); // TODO make based off of real velocity from firmware + // joint_states_.position[i] = (prev_joint_states.position[i]) + (joint_states_.velocity[i] * (current_time - prev_time).seconds()); + // prev_joint_states.position[i] = joint_states_.position[i]; + // joint_states_.position[i] -= axes[i].zero_rad; + } + rclcpp::Time 
current_time(joint_states_.header.stamp); + rclcpp::Time prev_time(prev_joint_states.header.stamp); + joint_states_.name[6] = joint_names[6]; + joint_states_.name[7] = joint_names[7]; + joint_states_.position[6] = firmToMoveitOffsetPos(axes[EE_INDEX].curr_pos, EE_INDEX); + joint_states_.position[7] = firmToMoveitOffsetPos(axes[EE_INDEX].curr_pos * -1, EE_INDEX); + joint_states_.velocity[6] = 0; + joint_states_.velocity[7] = 0; + + joint_states_.header.stamp = rclcpp::Clock().now(); + arm_position_publisher->publish(current_arm_status); + joint_state_publisher_->publish(joint_states_); + // RCLCPP_INFO(this->get_logger(), "Absolute Angle Position Echo Update Successfull"); + // fresh_rx_angle = true; + + } else { + // Error handling: could not parse all 6 angles, or message is messed up. + RCLCPP_ERROR(this->get_logger(), "Absolute Angle Position Echo Rejected, incorrect syntax"); + // fresh_rx_angle = false; + + return; + } +} + +void ArmSerial::sendMsg(std::string outMsg) { + // TODO: prob remove, do send in specific funcs + + // teensy.write(outMsg); + // teensy.flushOutput(); +} + +void ArmSerial::sendHomeCmd(int target_axis) { + // TODO: use moteus pos feedback to get const home position values + + // send home request + // std::string home_msg; + // if (target_axis != HOME_ALL_ID) { + // home_msg = "$h(" + std::to_string(target_axis) + ")\n"; + // } else { + // home_msg = "$h(A)\n"; + // } + // + // sendMsg(home_msg); +} + +void ArmSerial::sendCommCmd(int target_state) { + // send communication request + std::string msg; + if (target_state) { + // msg = "$SCP(1)\n"; + send_angles = true; + } else { + // msg = "$SCP(0)\n"; + send_angles = false; + } + + // sendMsg(msg); +} + +void ArmSerial::serial_rx() { + if (send_angles) { + std::vector command_frames; + + // Accumulate all of our command CAN frames. + for (const auto &pair : controllers) { + command_frames.push_back(pair.second->MakeQuery()); + } + + // Now send them in a single call to Transport::Cycle. 
+ std::vector replies; + transport->BlockingCycle(&command_frames[0], command_frames.size(), &replies); + + // Finally, print out our current query results. + char buf[4096] = {}; + std::string status_line; + + // Note: The original code had a confusing 'status_line += buf;' here + // without 'buf' having been initialized with status data yet. + // I'll keep the logic that builds the servo status line. + + // We parse these into a map to both sort and de-duplicate them, + // and persist data in the event that any are missing. + for (const auto &frame : replies) { + servo_data[frame.source] = moteus::Query::Parse(frame.data, frame.size); + } + + for (const auto &pair : servo_data) { + const auto r = pair.second; + ::snprintf(buf, sizeof(buf) - 1, "%2d %3d p/v/t=(%7.3f,%7.3f,%7.3f) ", pair.first, static_cast(r.mode), r.position, r.velocity, + r.torque); + // Convert position from revolutions (moteus default) to degrees (360 * rev) + // and store it in the axes structure based on the CAN ID (pair.first); axes[] is 0-indexed by (CAN ID - 1). + axes[pair.first - 1].curr_pos = (360 * r.position); + + RCLCPP_INFO(this->get_logger(), "posse %d, %lf", pair.first, axes[pair.first - 1].curr_pos); + status_line += buf; + } + + // --- NEW CODE: Construct the message for parseArmAngleUart --- + // Ensure you have all 7 axes data before calling parseArmAngleUart. + // Assuming axes[0] through axes[6] correspond to the 7 joints.
+ + char angle_msg_buffer[256]; + int result = ::snprintf(angle_msg_buffer, sizeof(angle_msg_buffer), "$my_angleP(%f, %f, %f, %f, %f, %f, %f)\n", axes[0].curr_pos, + axes[1].curr_pos, axes[2].curr_pos, axes[3].curr_pos, axes[4].curr_pos, axes[5].curr_pos, axes[6].curr_pos); + + // Check if the snprintf succeeded (result > 0 and result < sizeof(angle_msg_buffer)) + if (result > 0 && (size_t)result < sizeof(angle_msg_buffer)) { + // Pass the constructed string to the parsing function + parseArmAngleUart(std::string(angle_msg_buffer)); + } else { + RCLCPP_ERROR(this->get_logger(), "Failed to construct angle message for parsing."); + } + // --- END NEW CODE --- + + // ::printf("%s \r", status_line.c_str()); + // ::fflush(::stdout); + } +} + +void ArmSerial::send_position_command(float pos[NUM_JOINTS]) { + RCLCPP_INFO(this->get_logger(), "Test limits Sent"); + std::vector command_frames; + + // --- START: Update to use 'pos' parameter --- + std::map target_positions; + + // 1. Map the input 'pos' array (index 0 to NUM_JOINTS-1) to CAN IDs. + // Assuming CAN ID starts at 1 and corresponds to the index + 1. + // Also assuming 'pos' contains positions in RADIANS, which need conversion to REVOLUTIONS. + + // Conversion factor: Revolutions = Radians / (2 * pi) + const double RADIANS_TO_REVOLUTIONS = 1.0 / (2.0 * M_PI); // M_PI is typically available in + + for (int i = 0; i < NUM_JOINTS; ++i) { + // Moteus CAN IDs are 1-based, so use (i + 1) as the key. + int can_id = i + 1; + + // Convert input position (assumed Radians) to Moteus's expected Revolutions. + double position_in_rev = (double)pos[i] * RADIANS_TO_REVOLUTIONS; + + // Store the converted target position. + target_positions[can_id] = position_in_rev; + } + // --- END: Update to use 'pos' parameter --- + + // Accumulate all of our command CAN frames. 
+ for (const auto &pair : controllers) { + // pair.first is the CAN ID + // pair.second is the moteus::Controller object + + moteus::PositionMode::Command position_command; + + // Use the converted position from the map. + // We use .at() for safety, assuming every controller CAN ID has a corresponding target_position. + position_command.position = target_positions.at(pair.first); + + // Assuming we want the motor to stop upon reaching the target position. + position_command.velocity = 0.0; + position_command.feedforward_torque = 0.0; + + command_frames.push_back(pair.second->MakePosition(position_command)); + } + + // Now send them in a single call to Transport::Cycle. + std::vector replies; + transport->BlockingCycle(&command_frames[0], command_frames.size(), &replies); + + // Finally, print out our current query results. + char buf[4096] = {}; + std::string status_line; + + status_line += buf; // This line seems vestigial, keeping for context if it's meant to be there. + + // We parse these into a map to both sort and de-duplicate them, + // and persist data in the event that any are missing. + for (const auto &frame : replies) { + servo_data[frame.source] = moteus::Query::Parse(frame.data, frame.size); + } + + // --- Prepare message for parseArmAngleUart --- + char angle_msg_buffer[256]; + + // This part requires all 7 joint positions to be available in the 'axes' array + // after the query/reply cycle has updated 'axes[pair.first - 1].curr_pos = (360 * r.position);' + + for (const auto &pair : servo_data) { + const auto r = pair.second; + ::snprintf(buf, sizeof(buf) - 1, "%2d %3d p/v/t=(%7.3f,%7.3f,%7.3f) ", pair.first, static_cast(r.mode), r.position, r.velocity, + r.torque); + // Position converted from revolutions to DEGREES (360 * rev); axes[] is 0-indexed by (CAN ID - 1). + axes[pair.first - 1].curr_pos = (360 * r.position); + + status_line += buf; + } + + // Construct the message for parseArmAngleUart using the *updated* positions (in degrees).
+ // axes[] indices are 0-based (CAN ID - 1), so the 7 joints are axes[0]..axes[6] + int result = ::snprintf(angle_msg_buffer, sizeof(angle_msg_buffer), "$my_angleP(%f, %f, %f, %f, %f, %f, %f)\n", axes[0].curr_pos, + axes[1].curr_pos, axes[2].curr_pos, axes[3].curr_pos, axes[4].curr_pos, axes[5].curr_pos, axes[6].curr_pos); + + if (result > 0 && (size_t)result < sizeof(angle_msg_buffer)) { + // Pass the constructed string to the parsing function + parseArmAngleUart(std::string(angle_msg_buffer)); + } else { + // Log an error if snprintf failed or buffer was too small + // (You may need access to the logger for proper ROS2 logging here) + } + // --- End message preparation --- + + // ::printf("%s \r", status_line.c_str()); + // ::fflush(::stdout); +} + +void ArmSerial::send_velocity_command(float vel[NUM_JOINTS]) { + std::vector command_frames; + + std::map target_positions; + + const double RADIANS_TO_REVOLUTIONS = 1.0 / (2.0 * M_PI); // M_PI is typically available in + + // Accumulate all of our command CAN frames. + for (const auto &pair : controllers) { + // pair.first is the CAN ID + // pair.second is the moteus::Controller object + + moteus::PositionMode::Command position_command; + + // Use the converted position from the map. + // We use .at() for safety, assuming every controller CAN ID has a corresponding target_position. + position_command.position = std::numeric_limits::quiet_NaN(); + + // Assuming we want the motor to stop upon reaching the target position. + RCLCPP_INFO(this->get_logger(), "vel %f", static_cast(vel[(pair.first - 1)])); + position_command.velocity = vel[(pair.first - 1)]; + RCLCPP_INFO(this->get_logger(), "vel %f", position_command.velocity); + + command_frames.push_back(pair.second->MakePosition(position_command)); + } + + // Now send them in a single call to Transport::Cycle.
+ std::vector replies; + transport->BlockingCycle(&command_frames[0], command_frames.size(), &replies); +} + +void ArmSerial::send_test_limits_command() { + char tx_msg[TX_UART_BUFF]; + sprintf(tx_msg, "$t()\n"); + sendMsg(tx_msg); + RCLCPP_INFO(this->get_logger(), "Test limits Sent %s", tx_msg); +} diff --git a/src/aruco_detector/package.xml b/src/aruco_detector/package.xml index 567f1ecc..a27aabb5 100644 --- a/src/aruco_detector/package.xml +++ b/src/aruco_detector/package.xml @@ -17,6 +17,9 @@ + + OpenCV + cv2_aruco ament_lint_auto ament_lint_common diff --git a/src/aruco_detector/src/DetectMarker.cpp b/src/aruco_detector/src/DetectMarker.cpp index 2f85f7fc..4db882ce 100755 --- a/src/aruco_detector/src/DetectMarker.cpp +++ b/src/aruco_detector/src/DetectMarker.cpp @@ -86,5 +86,4 @@ int main(int argc, char** argv) { rclcpp::spin(node); rclcpp::shutdown(); return 0; - } diff --git a/src/localization_dev/localization_dev/__init__.py b/src/localization_dev/localization_dev/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/localization_dev/localization_dev/mahony_filter.py b/src/localization_dev/localization_dev/mahony_filter.py new file mode 100644 index 00000000..3e99de1b --- /dev/null +++ b/src/localization_dev/localization_dev/mahony_filter.py @@ -0,0 +1,210 @@ +import rclpy +from rclpy.node import Node +from sensor_msgs.msg import Imu +from geometry_msgs.msg import Quaternion +import numpy as np + + +def quat_mul(q, r): + """Quaternion multiplication q x r""" + w1, x1, y1, z1 = q + w2, x2, y2, z2 = r + return np.array([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2 + ]) + + +def quat_rotate(q, v): + """Rotate vector v by quaternion q.""" + q_conj = np.array([q[0], -q[1], -q[2], -q[3]]) + v_quat = np.array([0.0, v[0], v[1], v[2]]) + return quat_mul(quat_mul(q, v_quat), q_conj)[1:] + + +class MahonyFilter(Node): + def __init__(self): + """ + Tunable 
params: + kP : Proportional gain + kI : Integral gain + + Need to specify intial attitude estimates, biases and sampling time + + Let Initial attitude be zero for at rest, + Let biases be computed by taking average samples of IMU at rest + then by taking the mean value + The sampling time is the inverse of the IMU publishing rate + + NOTE: Bias drifts over time + + ros2 launch phidgets_spatial spatial-launch.py --> IMU publisher + """ + super().__init__('mahony_filter') + + self.q = np.array([1.0, 0.0, 0.0, 0.0]) # Initial quaternion + self.bg = np.array([0.0, 0.0, 0.0]) # Inital gyro bias + self.kP = 1.0 + self.kI = 0.01 + self.integral_error = np.zeros(3) + + self.last_time = None + + self.imu_sub = self.create_subscription( + Imu, + '/imu/data_raw', + self.imu_callback, + 50 + ) + + # Needs to be implemented later + # self.heading_sub = self.create_subscription( + + self.pub = self.create_publisher( + Quaternion, + '/imu/quaternion', + 50 + ) + + def imu_callback(self, msg): + """ + Gryo Model: + w = w_hat _+ b_g + n_g + w : measured angular velocity + w_hat: true angular velocity + bg(t) : gyro bias -> bg_dot = bg(t) ~ N(0, Qg) + Qg : covariance mtx gyro bias noise + + Accel Model: + a = R.T(a_hat - g) + b_a + n_a + + a : measured acceleration + a_hat: true acceleration + g : gravity vector + R : orientation of the sensor wrt the world frame + b_a(t): accel bias -> b_a_dot = b_a(t) ~ N(0, Qa) + Qa : covariance mtx accel bias noise + + Orientation of the system must be known from an outside source + + https://nitinjsanket.github.io/tutorials/attitudeest/mahony.html + + --------------------------------------------------------------------------- + + Defining a coord axis: + + Let I, W, B be the inertial, world, and body frames respectively. 
+ + The goal is to estimate (I->W)q -> q is a quaternion + + https://ethz.ch/content/dam/ethz/special-interest/mavt/robotics-n-intelligent-systems/asl-dam/documents/lectures/robot_dynamics/RD2_Quaternions.pdf + + + 1) Obtain sensor measurements + I^omega_t, I^a_t be gyro, accel measurements resp. and I^ahat_t be normed + + 2) Orientation error using accel measurments + + Compute orientation error from previous estimate using accel measurements + + v (W->I qhat_est,t) = [2*(q2q4 - q1q3), + 2*(q1q2 + q3q4), + (q1^2 - q2^2 - q3^2 + q4^2)] + + e_t+1 = I^ahat_t+1 x v(W->I qhat_est,t) # for P + ei_t+1 = ei_t + e_t+1 * dt # for I + + dt is the time elapsed between samples + + 3) Update gyro using PI compensations (Fusion) + + I^omega_t+1 = I^omega_t+1 + kP*e_t+1 + kI*ei_t+1 + + 4) Orientation increment from Gyro + + (I->W qdot_omega,t+1) = 1/2 * (W->I qhat_est,t) quat_mult [0, I^omega_t+1].T + + 5) Numerical Integration + + Compute orientation using numerical integration + + (I->W q_est,t+1) = (I->W qhat_est,t) + (I->W qdot_omega,t+1) * dt + + For each time step, repeat from step 1 + --------------------------------------------------------------------------- + + """ + + # --- Compute dt --- + if self.last_time is None: + self.last_time = msg.header.stamp + return + + t = msg.header.stamp + t_now = t.sec + t.nanosec*1e-9 + t_prev = self.last_time.sec + self.last_time.nanosec*1e-9 + dt = t_now - t_prev + self.last_time = t + if dt <= 0: + return + if dt > 0.5: + return # Ignore large time gaps + + # --- Read IMU --- + # rad/s + gyro = np.array([msg.angular_velocity.x, + msg.angular_velocity.y, + msg.angular_velocity.z]) - self.bg + + # m/s^2 + accel = np.array([msg.linear_acceleration.x, + msg.linear_acceleration.y, + msg.linear_acceleration.z]) + + if np.linalg.norm(accel) == 0: + return + + accel = accel / np.linalg.norm(accel) # Only ahat is used + + # --- Compute orientation error --- + v = quat_rotate(self.q, np.array([0.0, 0.0, -1.0])) # Gravity vector in body frame + 
+ e = np.cross(v, accel) + self.integral_error += e * dt + + # --- Update gyro measurements --- + gyro_corrected = gyro + self.kP * e + self.kI * self.integral_error + + # --- Orientation increment from gyro --- + qdot_omega = 0.5 * quat_mul( + self.q, + np.array([0.0, gyro_corrected[0], gyro_corrected[1], gyro_corrected[2]]) # .T 1-d Array + ) + + # --- Numerical integration --- + self.q = self.q + qdot_omega * dt # q_t+1 + self.q = self.q / np.linalg.norm(self.q) # Normalize + # Non-unit quaternions lead to errors + # --- Publish quaternion --- + quat_msg = Quaternion() + quat_msg.w = self.q[0] + quat_msg.x = self.q[1] + quat_msg.y = self.q[2] + quat_msg.z = self.q[3] + self.pub.publish(quat_msg) + + +def main(args=None): + rclpy.init(args=args) + node = MahonyFilter() + try: + rclpy.spin(node) + except KeyboardInterrupt: + pass + rclpy.shutdown() + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/src/localization_dev/package.xml b/src/localization_dev/package.xml new file mode 100644 index 00000000..b0b645fc --- /dev/null +++ b/src/localization_dev/package.xml @@ -0,0 +1,25 @@ + + + + localization_dev + 0.0.0 + TODO: Package description + kingcammy + TODO: License declaration + + rclpy + sensor_msgs + geometry_msgs + std_msgs + + + + ament_copyright + ament_flake8 + ament_pep257 + python3-pytest + + + ament_python + + diff --git a/src/localization_dev/resource/localization_dev b/src/localization_dev/resource/localization_dev new file mode 100644 index 00000000..e69de29b diff --git a/src/localization_dev/setup.cfg b/src/localization_dev/setup.cfg new file mode 100644 index 00000000..b4eb1e70 --- /dev/null +++ b/src/localization_dev/setup.cfg @@ -0,0 +1,4 @@ +[develop] +script_dir=$base/lib/localization_dev +[install] +install_scripts=$base/lib/localization_dev diff --git a/src/localization_dev/setup.py b/src/localization_dev/setup.py new file mode 100644 index 00000000..de3ca95f --- /dev/null +++ b/src/localization_dev/setup.py @@ 
-0,0 +1,30 @@ +from setuptools import find_packages, setup + +package_name = 'localization_dev' + +setup( + name=package_name, + version='0.0.0', + packages=find_packages(exclude=['test']), + data_files=[ + ('share/ament_index/resource_index/packages', + ['resource/' + package_name]), + ('share/' + package_name, ['package.xml']), + ], + install_requires=['setuptools', 'numpy'], + zip_safe=True, + maintainer='kingcammy', + maintainer_email='cameronbasara@gmail.com', + description='TODO: Package description', + license='TODO: License declaration', + extras_require={ + 'test': [ + 'pytest', + ], + }, + entry_points={ + 'console_scripts': [ + 'mahony_filter = localization_dev.mahony_filter:main' + ], + }, +) diff --git a/src/localization_dev/test/test_copyright.py b/src/localization_dev/test/test_copyright.py new file mode 100644 index 00000000..97a39196 --- /dev/null +++ b/src/localization_dev/test/test_copyright.py @@ -0,0 +1,25 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ament_copyright.main import main +import pytest + + +# Remove the `skip` decorator once the source file(s) have a copyright header +@pytest.mark.skip(reason='No copyright header has been placed in the generated source file.') +@pytest.mark.copyright +@pytest.mark.linter +def test_copyright(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found errors' diff --git a/src/localization_dev/test/test_flake8.py b/src/localization_dev/test/test_flake8.py new file mode 100644 index 00000000..27ee1078 --- /dev/null +++ b/src/localization_dev/test/test_flake8.py @@ -0,0 +1,25 @@ +# Copyright 2017 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_flake8.main import main_with_errors +import pytest + + +@pytest.mark.flake8 +@pytest.mark.linter +def test_flake8(): + rc, errors = main_with_errors(argv=[]) + assert rc == 0, \ + 'Found %d code style errors / warnings:\n' % len(errors) + \ + '\n'.join(errors) diff --git a/src/localization_dev/test/test_pep257.py b/src/localization_dev/test/test_pep257.py new file mode 100644 index 00000000..b234a384 --- /dev/null +++ b/src/localization_dev/test/test_pep257.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_pep257.main import main +import pytest + + +@pytest.mark.linter +@pytest.mark.pep257 +def test_pep257(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found code style errors / warnings' diff --git a/src/object_detection/object_detection/yolo_detector_node.py b/src/object_detection/object_detection/yolo_detector_node.py index 63782e11..be0def87 100755 --- a/src/object_detection/object_detection/yolo_detector_node.py +++ b/src/object_detection/object_detection/yolo_detector_node.py @@ -1,3 +1,23 @@ +#!/usr/bin/env python3 +# This line should be added at the beginning of any ROS node. It specifies the location of the Python interpreter +# The 'env' command helps ensure that the correct interpreter is used based on your system’s environment +# This concept is useful beyond ROS, as it deals with specifying the interpreter path in scripts +# For more details, you can check this link: https://stackoverflow.com/questions/7670303/purpose-of-usr-bin-python3-shebang +# If you're already familiar with this and just forgot to include it, feel free to ignore this comment up to yo + +""" + Run yolo on a detected camera feed + + How to use: + first run: 'ros2 run cameras cameras_node' + then run: 'ros2 run object_detection yolo_detector_node' + now to display the results: 'ros2 run rqt_image_view rqt_image_view' + + do this seperately in 3 terminals. 
TODO: setup a launch file so this is easier lol + + Make sure to build and source before running +""" + import rclpy from rclpy.node import Node from sensor_msgs.msg import Image @@ -5,23 +25,31 @@ import cv2 from ultralytics import YOLO - class YOLODetectorNode(Node): def __init__(self): super().__init__('yolo_detector_node') - self.publisher_ = self.create_publisher(Image, 'detected_frames', 10) + self.publisher_ = self.create_publisher(Image, 'camera/yolo/object_detection', 30) # Changed the topic name for some more clarity + self.subscription = self.create_subscription( + Image, + '/camera/color/image_raw', # This can be changed to any camera topic we want to use (realsense raw rn), for comp this will be the ptz topic + self.image_callback, + 10) self.bridge = CvBridge() # TODO Change this to whatever YOLO model we decide to use - self.yolo = YOLO('yolov8n.pt') + # For this I want to do some unit testing to use it for the KRIA on fpga, not sure if yolov8 will be able to be used for this + self.yolo = YOLO('bestHammer.pt') # right now this is fine - # TODO Not sure if this is the right camera for video capture for the rover - self.videoCap = cv2.VideoCapture(0) + ## We do this via ros since we have the topics we can use a subscriber for this + # # TODO Not sure if this is the right camera for video capture for the rover + # self.videoCap = cv2.VideoCapture(0) # Timer to process frames - self.timer = self.create_timer(0.1, self.process_frame) + # self.timer = self.create_timer(0.1, self.process_frame) + def get_colours(self, cls_num): + # You'll have to explain to me whats going on here lol base_colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255)] color_index = cls_num % len(base_colors) increments = [(1, -2, 1), (-2, 1, -1), (1, -1, 2)] @@ -31,14 +59,23 @@ def get_colours(self, cls_num): ] return tuple(color) - def process_frame(self): - ret, frame = self.videoCap.read() - if not ret: - self.get_logger().warning("Failed to capture frame.") + # For now lets try it 
this way + # Instead of using a timer to process frames periodically, the node now processes frames as they arrive via the callback + # process frame here the same way, but more reactively + # Callback meth is called when each msg is received, so each video frame + def image_callback(self, msg): + # Convert ROS Image message to OpenCV image + try: + frame = self.bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8') + except Exception as e: + self.get_logger().error(f'Error converting image: {e}') return + # Process frame with YOLO results = self.yolo.track(frame, stream=True) - + + # Draw detections on frame + annotated_frame = frame.copy() for result in results: classes_names = result.names for box in result.boxes: @@ -48,14 +85,17 @@ def process_frame(self): cls = int(box.cls[0]) class_name = classes_names[cls] colour = self.get_colours(cls) - cv2.rectangle(frame, (x1, y1), (x2, y2), colour, 2) - cv2.putText(frame, f'{class_name} {box.conf[0]:.2f}', - (x1, y1), cv2.FONT_HERSHEY_SIMPLEX, 1, colour, 2) + if class_name == "hammer": + cv2.rectangle(annotated_frame, (x1, y1), (x2, y2), colour, 2) + cv2.putText(annotated_frame, f'{class_name} {box.conf[0]:.2f}', + (x1, y1), cv2.FONT_HERSHEY_SIMPLEX, 1, colour, 2) + # Convert frame to ROS2 Image message and publish - msg = self.bridge.cv2_to_imgmsg(frame, encoding='bgr8') - self.publisher_.publish(msg) - self.get_logger().info("Published frame with detections.") + detected_msg = self.bridge.cv2_to_imgmsg(annotated_frame, encoding='bgr8') + detected_msg.header = msg.header # Preserve original timestamp and frame_id, just allows us to communicate timestamped data if needed + self.publisher_.publish(detected_msg) + self.get_logger().debug("Published frame with detections") def destroy_node(self): # Release video capture when node is destroyed diff --git a/src/object_detection/package.xml b/src/object_detection/package.xml index f09b6609..12c564b9 100644 --- a/src/object_detection/package.xml +++ 
b/src/object_detection/package.xml @@ -7,14 +7,15 @@ ubuntu TODO: License declaration + rclpy sensor_msgs cv_bridge - ament_copyright - ament_flake8 - ament_pep257 - python3-pytest + + ament_python diff --git a/src/rover_vision/rover_vision/full_cameras_node.py b/src/rover_vision/rover_vision/full_cameras_node.py new file mode 100755 index 00000000..6f6afdd2 --- /dev/null +++ b/src/rover_vision/rover_vision/full_cameras_node.py @@ -0,0 +1,210 @@ +#!/usr/bin/env python3 +import rclpy +from rclpy.node import Node +from sensor_msgs.msg import Image, Imu +import cv2 +from cv_bridge import CvBridge +import pyrealsense2 as rs +import numpy as np +from typing import List, Dict, Tuple, Optional + +class CameraPublisherFull(Node): + def __init__(self): + super().__init__('camera_publisher_full') + self.bridge = CvBridge() + + # Initialize RealSense if present + self.rs_pipeline = None + self.rs_config = None + self.rs_publishers = {} + self.imu_publisher = self.create_publisher(Imu, '/camera/imu/data', 30) + self.has_realsense = self.setup_realsense() + + # Initialize standard cameras (regardless of RealSense detection) + self.std_publishers = [] + self.std_caps = [] + self.std_dev_paths = [] + + # Auto-detect available standard camera devices + device_paths = self.detect_available_cameras() + for i, dev_path in enumerate(device_paths): + pub = self.create_publisher(Image, f'cam_{i}', 30) + cap = cv2.VideoCapture(dev_path) + if not cap.isOpened(): + self.get_logger().error(f'Could not open video device at: {dev_path}') + continue + self.std_publishers.append(pub) + self.std_caps.append(cap) + self.std_dev_paths.append(dev_path) + + # Check if we have any camera at all + if not self.has_realsense and len(self.std_caps) == 0: + self.get_logger().error('No active video devices found.') + exit(1) + + # Setup timer callback + self.timer_period = 0.033 # ~30 FPS + self.timer = self.create_timer(self.timer_period, self.timer_callback) + + def setup_realsense(self) -> bool: + 
"""Initialize RealSense camera if available and IMU stream""" + try: + # Create pipeline and config + self.rs_pipeline = rs.pipeline() + self.rs_config = rs.config() + + # Try to find RealSense devices + ctx = rs.context() + devices = ctx.query_devices() + if len(devices) == 0: + self.get_logger().info('No RealSense devices detected') + return False + + # Setup streams for the first RealSense device + self.get_logger().info(f'Found RealSense device: {devices[0].get_info(rs.camera_info.name)}') + + # Enable video streams + self.rs_config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30) + self.rs_config.enable_stream(rs.stream.infrared, 1, 640, 480, rs.format.y8, 30) + self.rs_config.enable_stream(rs.stream.infrared, 2, 640, 480, rs.format.y8, 30) + self.rs_config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30) + + # Enable IMU stream + self.rs_config.enable_stream(rs.stream.accel, rs.format.motion_xyz32f, 30) + self.rs_config.enable_stream(rs.stream.gyro, rs.format.motion_xyz32f, 30) + + # Start streaming + self.rs_pipeline.start(self.rs_config) + + # Create publishers for each stream + self.rs_publishers['color'] = self.create_publisher(Image, '/camera/color/image_raw', 30) + self.rs_publishers['infra1'] = self.create_publisher(Image, '/camera/infra1/image_rect_raw', 30) + self.rs_publishers['infra2'] = self.create_publisher(Image, '/camera/infra2/image_rect_raw', 30) + self.rs_publishers['depth'] = self.create_publisher(Image, '/camera/depth/image_rect_raw', 30) + + self.get_logger().info('RealSense camera initialized successfully') + return True + + except Exception as e: + self.get_logger().warn(f'Failed to initialize RealSense: {e}') + return False + + def detect_available_cameras(self) -> List[str]: + """Detect standard USB cameras, excluding RealSense devices""" + available_cameras = [] + max_tested = 10 + + for i in range(max_tested): + cap = cv2.VideoCapture(f'/dev/video{i}') + if cap.isOpened(): + # Check if this is not a RealSense 
camera by looking at properties + name = cap.getBackendName() + if 'realsense' not in name.lower(): + available_cameras.append(f'/dev/video{i}') + else: + self.get_logger().info(f'Skipping RealSense camera at /dev/video{i}') + cap.release() + + self.get_logger().info(f'Detected standard camera devices: {available_cameras}') + return available_cameras + + def publish_realsense_frame(self, frame, frame_type): + if frame_type in self.rs_publishers: + if frame_type in ['color', 'infra1', 'infra2', 'depth']: + # Process image based on type + if frame_type == 'color': + # Convert BGR to RGB + img = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + msg = self.bridge.cv2_to_imgmsg(img, encoding="rgb8") + elif frame_type in ['infra1', 'infra2']: + # Infrared is already grayscale (y8) + msg = self.bridge.cv2_to_imgmsg(frame, encoding="mono8") + else: # depth + # Depth is 16-bit + msg = self.bridge.cv2_to_imgmsg(frame, encoding="16UC1") + + self.rs_publishers[frame_type].publish(msg) + + def publish_imu_data(self, accel_data, gyro_data): + imu_msg = Imu() + imu_msg.header.stamp = self.get_clock().now().to_msg() + + # Fill in IMU data for linear acceleration + imu_msg.linear_acceleration.x = accel_data[0] + imu_msg.linear_acceleration.y = accel_data[1] + imu_msg.linear_acceleration.z = accel_data[2] + + # Fill in IMU data for angular velocity + imu_msg.angular_velocity.x = gyro_data[0] + imu_msg.angular_velocity.y = gyro_data[1] + imu_msg.angular_velocity.z = gyro_data[2] + + self.imu_publisher.publish(imu_msg) + + def timer_callback(self): + # Process RealSense frames if available + if self.has_realsense: + try: + frames = self.rs_pipeline.wait_for_frames() + + # Process camera frames + color_frame = frames.get_color_frame() + if color_frame: + color_image = np.asanyarray(color_frame.get_data()) + self.publish_realsense_frame(color_image, 'color') + + depth_frame = frames.get_depth_frame() + if depth_frame: + depth_image = np.asanyarray(depth_frame.get_data()) + 
self.publish_realsense_frame(depth_image, 'depth') + + # Process infrared frames + for i in [1, 2]: + ir_frame = frames.get_infrared_frame(i) + if ir_frame: + ir_image = np.asanyarray(ir_frame.get_data()) + self.publish_realsense_frame(ir_image, f'infra{i}') + + # Process IMU frames + accel_frame = frames.first(rs.stream.accel) + gyro_frame = frames.first(rs.stream.gyro) + if accel_frame and gyro_frame: + accel_data = accel_frame.as_motion_frame().get_motion_data() + gyro_data = gyro_frame.as_motion_frame().get_motion_data() + self.publish_imu_data(accel_data, gyro_data) + + except Exception as e: + self.get_logger().error(f'Error processing RealSense frames: {e}') + + # Process standard camera frames + for i in range(len(self.std_caps)): + cap = self.std_caps[i] + dev_path = self.std_dev_paths[i] + ret, frame = cap.read() + if ret: + frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + msg = self.bridge.cv2_to_imgmsg(frame, encoding="rgb8") + self.std_publishers[i].publish(msg) + else: + pass + +def main(args=None): + # Initialize node + rclpy.init(args=args) + node = CameraPublisherFull() + try: + # Spin + rclpy.spin(node) + except KeyboardInterrupt: + pass + finally: + # Shutdown + if node.rs_pipeline: + node.rs_pipeline.stop() + for cap in node.std_caps: + cap.release() + node.destroy_node() + rclpy.shutdown() + +if __name__ == '__main__': + main() diff --git a/src/rover_vision/rover_vision/imu_node.py b/src/rover_vision/rover_vision/imu_node.py new file mode 100755 index 00000000..4e145e7f --- /dev/null +++ b/src/rover_vision/rover_vision/imu_node.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +import rclpy +from rclpy.node import Node +from sensor_msgs.msg import Imu +import pyrealsense2 as rs +import numpy as np + +class IMUPublisher(Node): + def __init__(self): + super().__init__('full_cameras_node') + self.imu_pub = self.create_publisher(Imu, '/camera/imu/data', 10) + self.rs_pipeline = None + self.has_realsense = self.initialize_camera() + + if 
self.has_realsense: + self.get_logger().info('Starting IMU data processing') + # Increase timer period to reduce load (50Hz instead of 100Hz) + self.timer = self.create_timer(0.02, self.timer_callback) + + def initialize_camera(self): + try: + self.rs_pipeline = rs.pipeline() + self.rs_config = rs.config() + + # Try to find RealSense devices + ctx = rs.context() + devices = ctx.query_devices() + if len(devices) == 0: + self.get_logger().info('No RealSense devices detected') + return False + + self.get_logger().info(f"Found RealSense device: {devices[0].get_info(rs.camera_info.name)}") + + # Set specific IMU fps + self.rs_config.enable_stream(rs.stream.accel) + self.rs_config.enable_stream(rs.stream.gyro) + + # Start pipeline with configuration + self.rs_pipeline.start(self.rs_config) + self.get_logger().info('RealSense camera initialized successfully') + return True + + except Exception as e: + self.get_logger().error(f'Failed to initialize RealSense: {e}') + return False + + @staticmethod + def gyro_data(gyro): + return np.asarray([gyro.x, gyro.y, gyro.z]) + + @staticmethod + def accel_data(accel): + return np.asarray([accel.x, accel.y, accel.z]) + + def timer_callback(self): + if self.has_realsense: + try: + # Add a short timeout for frame waiting (5ms) + frames = self.rs_pipeline.wait_for_frames(5000) + + # Get motion frame set + accel = frames.first_or_default(rs.stream.accel) + gyro = frames.first_or_default(rs.stream.gyro) + + if accel and gyro: + # Extract motion data + accel_data = self.accel_data(accel.as_motion_frame().get_motion_data()) + gyro_data = self.gyro_data(gyro.as_motion_frame().get_motion_data()) + + # Create and populate IMU message + imu_msg = Imu() + imu_msg.header.stamp = self.get_clock().now().to_msg() + imu_msg.header.frame_id = "camera_imu_frame" + + # Set angular velocity (gyro) + imu_msg.angular_velocity.x = gyro_data[0] + imu_msg.angular_velocity.y = gyro_data[1] + imu_msg.angular_velocity.z = gyro_data[2] + + # Set linear 
acceleration (accelerometer) + imu_msg.linear_acceleration.x = accel_data[0] + imu_msg.linear_acceleration.y = accel_data[1] + imu_msg.linear_acceleration.z = accel_data[2] + + # Publish the message + self.imu_pub.publish(imu_msg) + + except Exception as e: + self.get_logger().error(f'Error processing frames: {e}') + +def main(args=None): + rclpy.init(args=args) + node = IMUPublisher() + + try: + rclpy.spin(node) + except KeyboardInterrupt: + pass + finally: + if node.rs_pipeline: + node.rs_pipeline.stop() + node.destroy_node() + rclpy.shutdown() + +if __name__ == '__main__': + main() \ No newline at end of file