diff --git a/.gitignore b/.gitignore
index 2c9a0b5e..b76c2118 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,4 +11,6 @@
.prev-vscode
**.pt
**.pth
-**.gz
\ No newline at end of file
+**.gz
+hri/packages/nlp/assets/**
+!hri/packages/nlp/assets/download-model.bash
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
index 706a6bf5..1ba74950 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -10,4 +10,9 @@
branch = humble
[submodule "manipulation/packages/pymoveit2"]
path = manipulation/packages/pymoveit2
- url = https://github.com/AndrejOrsula/pymoveit2
\ No newline at end of file
+ url = https://github.com/AndrejOrsula/pymoveit2
+
+[submodule "navigation/packages/PlayStation-JoyInterface-ROS2"]
+ path = navigation/packages/PlayStation-JoyInterface-ROS2
+ url = https://github.com/HarvestX/PlayStation-JoyInterface-ROS2.git
+ branch = humble
diff --git a/docker/hri/Dockerfile.stt b/docker/hri/Dockerfile.stt
index fb571564..0d8dab72 100644
--- a/docker/hri/Dockerfile.stt
+++ b/docker/hri/Dockerfile.stt
@@ -6,11 +6,7 @@ COPY ../../hri/packages/speech/scripts/stt/requirements.txt .
# Install dependencies
RUN apt-get update && apt-get install -y \
- libsndfile1 \
- ffmpeg \
portaudio19-dev \
&& rm -rf /var/lib/apt/lists/*
-RUN pip install --no-cache-dir -r requirements.txt
-
-RUN apt-get update && apt-get install -y ffmpeg
\ No newline at end of file
+RUN pip install --no-cache-dir -r requirements.txt
\ No newline at end of file
diff --git a/docker/hri/Dockerfile.stt-l4t b/docker/hri/Dockerfile.stt-l4t
new file mode 100644
index 00000000..ccfa937f
--- /dev/null
+++ b/docker/hri/Dockerfile.stt-l4t
@@ -0,0 +1,40 @@
+FROM dustynv/l4t-pytorch:r36.4.0
+
+# Install dependencies
+RUN apt-get update && apt-get install -y \
+libsndfile1 \
+libxml2-dev \
+portaudio19-dev \
+git \
+cmake \
+g++ \
+&& rm -rf /var/lib/apt/lists/*
+
+# Install CTranslate2 from source
+WORKDIR /tmp
+RUN git clone --recursive https://github.com/OpenNMT/CTranslate2.git
+WORKDIR /tmp/CTranslate2
+RUN mkdir build
+WORKDIR /tmp/CTranslate2/build
+RUN cmake .. -DOPENMP_RUNTIME=NONE -DWITH_CUDA=ON -DWITH_MKL=OFF -DWITH_CUDNN=ON
+RUN make -j4
+RUN make install
+RUN ldconfig
+WORKDIR /tmp/CTranslate2/python
+RUN pip install -r install_requirements.txt
+RUN python3 setup.py bdist_wheel
+RUN pip install dist/*.whl
+
+# Install PyAV from source
+WORKDIR /tmp
+RUN git clone https://github.com/PyAV-Org/PyAV.git
+WORKDIR /tmp/PyAV
+RUN pip install virtualenv
+RUN bash -c "source ./scripts/activate.sh && ./scripts/build-deps && make && deactivate && pip install ."
+
+# Install additional Python dependencies
+WORKDIR /app
+COPY ../../hri/packages/speech/scripts/stt/requirements.txt .
+RUN python3 -m pip install --upgrade pip
+RUN pip install --no-cache-dir -r /app/requirements.txt
+RUN pip install --upgrade protobuf
\ No newline at end of file
diff --git a/docker/hri/docker-compose.yml b/docker/hri/docker-compose.yml
index 35b5a1c6..3847b79d 100644
--- a/docker/hri/docker-compose.yml
+++ b/docker/hri/docker-compose.yml
@@ -1,7 +1,8 @@
include:
# Jetson Orin
- path: l4t-devices.yaml
- - path: stt.yaml
+ - path: stt-l4t.yaml
+ - path: ollama-jetson.yaml
# - path: chromadb.yaml
# Laptop
diff --git a/docker/hri/ollama-jetson.yaml b/docker/hri/ollama-jetson.yaml
new file mode 100644
index 00000000..997af525
--- /dev/null
+++ b/docker/hri/ollama-jetson.yaml
@@ -0,0 +1,13 @@
+services:
+ ollama:
+ container_name: home2-hri-ollama
+ image: dustynv/ollama:r36.4.0
+ runtime: nvidia
+ network_mode: host
+ volumes:
+ - ../../hri/packages/nlp/assets:/ollama
+ environment:
+ - OLLAMA_MODELS=/ollama
+ stdin_open: true
+ tty: true
+ restart: unless-stopped
diff --git a/docker/hri/ollama.yaml b/docker/hri/ollama.yaml
new file mode 100644
index 00000000..66d40447
--- /dev/null
+++ b/docker/hri/ollama.yaml
@@ -0,0 +1,13 @@
+services:
+ ollama:
+ container_name: home2-hri-ollama
+ image: ollama/ollama
+ runtime: nvidia
+ network_mode: host
+ volumes:
+ - ../../hri/packages/nlp/assets:/ollama
+ environment:
+ - OLLAMA_MODELS=/ollama
+ stdin_open: true
+ tty: true
+ restart: unless-stopped
diff --git a/docker/hri/stt-l4t.yaml b/docker/hri/stt-l4t.yaml
new file mode 100644
index 00000000..b0768f0d
--- /dev/null
+++ b/docker/hri/stt-l4t.yaml
@@ -0,0 +1,15 @@
+services:
+ stt-l4t:
+ container_name: home2-hri-stt-l4t
+ image: roborregos/home2:hri-stt-l4t
+ runtime: nvidia
+ build:
+ context: ../..
+ dockerfile: docker/hri/Dockerfile.stt-l4t
+ ports:
+ - 50051:50051
+ volumes:
+ - ../../hri/packages/speech/scripts/stt/:/app
+ - ../../hri/packages/speech/speech/:/app/speech
+ command: >
+ bash -c "source /tmp/PyAV/scripts/activate.sh && deactivate && python3 -u faster-whisper-l4t.py --port 50051 --model_size base.en"
diff --git a/docker/hri/stt.yaml b/docker/hri/stt.yaml
index 3d3b3211..83ced35d 100644
--- a/docker/hri/stt.yaml
+++ b/docker/hri/stt.yaml
@@ -10,5 +10,4 @@ services:
volumes:
- ../../hri/packages/speech/scripts/stt/:/app
- ../../hri/packages/speech/speech/:/app/speech
-
- command: ["bash", "-c", "python3 -u Faster-whisper.py --port 50051 --model_size base.en"]
+ command: ["bash", "-c", "python3 -u faster-whisper.py --port 50051 --model_size base.en"]
diff --git a/docker/navigation/Dockerfile.cpu b/docker/navigation/Dockerfile.cpu
new file mode 100644
index 00000000..4b5f52cd
--- /dev/null
+++ b/docker/navigation/Dockerfile.cpu
@@ -0,0 +1,2 @@
+ARG BASE_IMAGE=ubuntu:22.04
+FROM ${BASE_IMAGE}
diff --git a/docker/navigation/Dockerfile.cuda b/docker/navigation/Dockerfile.cuda
new file mode 100644
index 00000000..1bb48c88
--- /dev/null
+++ b/docker/navigation/Dockerfile.cuda
@@ -0,0 +1,3 @@
+ARG BASE_IMAGE=nvidia/cuda:11.4.1-cudnn8-devel-ubuntu20.04
+FROM ${BASE_IMAGE}
+
diff --git a/docker/navigation/Dockerfile.jetson b/docker/navigation/Dockerfile.jetson
new file mode 100644
index 00000000..55838e51
--- /dev/null
+++ b/docker/navigation/Dockerfile.jetson
@@ -0,0 +1,7 @@
+ARG BASE_IMAGE=dustynv/l4t-pytorch:r36.4.0
+FROM ${BASE_IMAGE}
+
+USER ros
+RUN sudo usermod -a -G video ros
+RUN newgrp video
+WORKDIR /workspace
diff --git a/docker/navigation/Dockerfile.l4t b/docker/navigation/Dockerfile.l4t-32
similarity index 100%
rename from docker/navigation/Dockerfile.l4t
rename to docker/navigation/Dockerfile.l4t-32
diff --git a/docker/navigation/docker-compose.yaml b/docker/navigation/docker-compose.yaml
new file mode 100644
index 00000000..77f16247
--- /dev/null
+++ b/docker/navigation/docker-compose.yaml
@@ -0,0 +1,29 @@
+services:
+ navigation:
+ container_name: home2-navigation
+ build:
+ context: ../..
+ dockerfile: ${DOCKERFILE:-docker/navigation/Dockerfile.cpu}
+ args:
+ BASE_IMAGE: ${BASE_IMAGE:-roborregos/home2:cpu_base}
+    image: ${IMAGE_NAME:-roborregos/home2:navigation-cpu}
+ volumes:
+ - ../../:/workspace
+ - /dev:/dev
+ - /dev/video0:/dev/video0
+ - /tmp/.X11-unix:/tmp/.X11-unix
+
+ network_mode: host
+ user: 1000:1000
+ privileged: true
+
+ environment:
+ DISPLAY: ${DISPLAY}
+ TERM: xterm-256color
+ QT_X11_NO_MITSHM: 1
+ QT_DEBUG_PLUGINS: 1
+
+ tty: true
+ stdin_open: true
+ working_dir: /workspace
+ command: ["/bin/bash", "-i", "-l"]
\ No newline at end of file
diff --git a/docker/navigation/run.sh b/docker/navigation/run.sh
new file mode 100755
index 00000000..d7307a42
--- /dev/null
+++ b/docker/navigation/run.sh
@@ -0,0 +1,122 @@
+#!/bin/bash
+
+
+#_________________________BUILD_________________________
+
+# Image names
+CPU_IMAGE="roborregos/home2:cpu_base"
+CUDA_IMAGE="roborregos/home2:gpu_base"
+
+# Function to check if an image exists
+check_image_exists() {
+ local image_name=$1
+ if ! docker images --format "{{.Repository}}:{{.Tag}}" | grep -q "^${image_name}$"; then
+ echo "Image $image_name does not exist. Building it..."
+ return 1 # Image doesn't exist
+ else
+ echo "Image $image_name already exists. Skipping build."
+ return 0 # Image exists
+ fi
+}
+
+# Check type of environment (CPU, GPU, or Jetson), default CPU
+ENV_TYPE="cpu"
+
+# Check device type
+if [[ -f /etc/nv_tegra_release ]]; then
+ ENV_TYPE="jetson"
+else
+ # Check if NVIDIA GPUs are available
+ if command -v nvidia-smi > /dev/null 2>&1; then
+ if nvidia-smi > /dev/null 2>&1; then
+ ENV_TYPE="gpu"
+ fi
+ fi
+fi
+echo "Detected environment: $ENV_TYPE"
+> .env
+# Write environment variables to .env file for Docker Compose and build base images
+case $ENV_TYPE in
+ "cpu")
+ #_____CPU_____
+ echo "DOCKERFILE=docker/navigation/Dockerfile.cpu" >> .env
+ echo "BASE_IMAGE=roborregos/home2:cpu_base" >> .env
+ echo "IMAGE_NAME=roborregos/home2:navigation-cpu" >> .env
+
+ # Build the base image if it doesn't exist
+ check_image_exists "$CPU_IMAGE"
+ if [ $? -eq 1 ]; then
+ docker compose -f ../cpu.yaml build
+ fi
+ ;;
+ "gpu")
+ #_____GPU_____
+ echo "DOCKERFILE=docker/navigation/Dockerfile.gpu" >> .env
+ echo "BASE_IMAGE=roborregos/home2:gpu_base" >> .env
+ echo "IMAGE_NAME=roborregos/home2:navigation-gpu" >> .env
+
+ # Build the base image if it doesn't exist
+ check_image_exists "$CUDA_IMAGE"
+ if [ $? -eq 1 ]; then
+ docker compose -f ../cuda.yaml build
+ fi
+
+ ;;
+ "jetson")
+ #_____Jetson_____
+ echo "DOCKERFILE=docker/navigation/Dockerfile.jetson" >> .env
+ echo "BASE_IMAGE=roborregos/home2:l4t_base" >> .env
+ echo "IMAGE_NAME=roborregos/home2:navigation-jetson" >> .env
+ ;;
+ *)
+ echo "Unknown environment type!"
+ exit 1
+ ;;
+esac
+
+
+#_________________________SETUP_________________________
+
+# Export user
+export LOCAL_USER_ID=$(id -u)
+export LOCAL_GROUP_ID=$(id -g)
+
+# Remove current install, build and log directories if they exist
+# if [ -d "../../install" ] || [ -d "../../log" ] || [ -d "../../build" ]; then
+# read -p "Do you want to delete 'install', 'log', and 'build' directories (Recommended for first build)? (y/n): " confirmation
+# if [[ "$confirmation" == "y" ]]; then
+# rm -rf ../../install/ ../../log/ ../../build/
+# echo "Directories deleted."
+# else
+# echo "Operation cancelled."
+# fi
+# fi
+
+# Setup camera permissions
+if [ -e /dev/video0 ]; then
+ echo "Setting permissions for /dev/video0..."
+ sudo chmod 666 /dev/video0 # Allow the container to access the camera device
+fi
+
+#_________________________RUN_________________________
+
+SERVICE_NAME="navigation" # Change this to your service name in docker-compose.yml
+
+# Check if the container exists
+EXISTING_CONTAINER=$(docker ps -a -q -f name=$SERVICE_NAME)
+if [ -z "$EXISTING_CONTAINER" ]; then
+ echo "No container with the name $SERVICE_NAME exists. Building and starting the container now..."
+ docker compose up --build -d
+fi
+
+# Check if the container is running
+RUNNING_CONTAINER=$(docker ps -q -f name=$SERVICE_NAME)
+
+if [ -n "$RUNNING_CONTAINER" ]; then
+ echo "Container $SERVICE_NAME is already running. Executing bash..."
+ docker compose exec -it $SERVICE_NAME /bin/bash
+else
+ echo "Container $SERVICE_NAME is stopped. Starting it now..."
+ docker compose up --build -d
+ docker compose exec $SERVICE_NAME /bin/bash
+fi
diff --git a/frida_interfaces/CMakeLists.txt b/frida_interfaces/CMakeLists.txt
index c5311d69..bdfcb38b 100644
--- a/frida_interfaces/CMakeLists.txt
+++ b/frida_interfaces/CMakeLists.txt
@@ -8,7 +8,7 @@ find_package(std_msgs REQUIRED)
find_package(geometry_msgs REQUIRED)
find_package(sensor_msgs REQUIRED)
find_package(action_msgs REQUIRED)
-find_package(geometry_msgs REQUIRED)
+find_package(shape_msgs REQUIRED)
find_package(unique_identifier_msgs REQUIRED)
file(GLOB_RECURSE action_files RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" "*.action")
@@ -20,7 +20,7 @@ rosidl_generate_interfaces(${PROJECT_NAME}
${action_files}
${srv_files}
${msg_files}
- DEPENDENCIES std_msgs unique_identifier_msgs action_msgs geometry_msgs sensor_msgs
+ DEPENDENCIES std_msgs unique_identifier_msgs action_msgs geometry_msgs sensor_msgs shape_msgs
)
# Export dependencies for other packages
diff --git a/frida_interfaces/hri/srv/AddItem.srv b/frida_interfaces/hri/srv/AddItem.srv
index ffe51ee4..04ee473b 100644
--- a/frida_interfaces/hri/srv/AddItem.srv
+++ b/frida_interfaces/hri/srv/AddItem.srv
@@ -1,7 +1,7 @@
# AddItem.srv
-string[] document
-string[] id
+string document
string collection
+string metadata
---
bool success # Whether the operation was successful
string message # A message describing the result
diff --git a/frida_interfaces/hri/srv/QueryItem.srv b/frida_interfaces/hri/srv/QueryItem.srv
index ed827de7..dc9a2c8f 100644
--- a/frida_interfaces/hri/srv/QueryItem.srv
+++ b/frida_interfaces/hri/srv/QueryItem.srv
@@ -2,7 +2,9 @@
string query # The search query
string collection
int16 topk #number of results
+string where
---
string[] results # List of results (documents) that match the query, top k
+string[] metadata
bool success # Whether the operation was successful
string message # A message describing the result
diff --git a/frida_interfaces/manipulation/srv/AddCollisionObject.srv b/frida_interfaces/manipulation/srv/AddCollisionObject.srv
new file mode 100644
index 00000000..9133dc64
--- /dev/null
+++ b/frida_interfaces/manipulation/srv/AddCollisionObject.srv
@@ -0,0 +1,8 @@
+string id
+string type
+geometry_msgs/PoseStamped pose
+geometry_msgs/Point dimensions # box: x/y/z size; cylinder: x=radius, z=height; sphere: x=radius; mesh: x=scale (optional)
+shape_msgs/Mesh mesh
+string path_to_mesh
+---
+bool success
\ No newline at end of file
diff --git a/frida_interfaces/manipulation/srv/RemoveCollisionObject.srv b/frida_interfaces/manipulation/srv/RemoveCollisionObject.srv
new file mode 100644
index 00000000..04d5d1bc
--- /dev/null
+++ b/frida_interfaces/manipulation/srv/RemoveCollisionObject.srv
@@ -0,0 +1,3 @@
+string id
+---
+bool success
\ No newline at end of file
diff --git a/frida_interfaces/manipulation/srv/ToggleServo.srv b/frida_interfaces/manipulation/srv/ToggleServo.srv
new file mode 100644
index 00000000..6c7653c2
--- /dev/null
+++ b/frida_interfaces/manipulation/srv/ToggleServo.srv
@@ -0,0 +1,3 @@
+bool state
+---
+bool success
\ No newline at end of file
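+# Illustrative CLI call, assuming the service is exposed under the name registered
+# in motion_planning_server.py:
+#   ros2 service call /manipulation/toggle_servo frida_interfaces/srv/ToggleServo "{state: true}"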
diff --git a/frida_interfaces/package.xml b/frida_interfaces/package.xml
index c5d28312..fe7f3eab 100644
--- a/frida_interfaces/package.xml
+++ b/frida_interfaces/package.xml
@@ -19,6 +19,7 @@
geometry_msgs
sensor_msgs
action_msgs
+ shape_msgs
rosidl_interface_packages
diff --git a/hri/packages/embeddings/scripts/ChromaClient.py b/hri/packages/embeddings/scripts/ChromaClient.py
new file mode 100644
index 00000000..16df8556
--- /dev/null
+++ b/hri/packages/embeddings/scripts/ChromaClient.py
@@ -0,0 +1,176 @@
+import chromadb
+from chromadb.utils import embedding_functions
+import pandas as pd
+from pathlib import Path
+import json
+from uuid import uuid4
+
+
+class ChromaClient:
+ def __init__(self):
+ self.client = chromadb.HttpClient(host="localhost", port=8000)
+ # Configure the embedding function
+ self.sentence_transformer_ef = (
+ embedding_functions.SentenceTransformerEmbeddingFunction(
+ model_name="all-MiniLM-L12-v2"
+ )
+ )
+
+ def list_collections(self):
+ """Method to list all collections"""
+ return self.client.list_collections()
+
+ def get_collection(self, collection_name):
+ """Method to get a collection, it retunrsn a collection object"""
+ return self.client.get_collection(collection_name)
+
+ def build_embeddings_callback(self):
+ """Method to build embeddings for the household items data"""
+ script_dir = Path(__file__).resolve().parent
+ dataframes = [
+ (
+ script_dir / "../embeddings/dataframes/items.csv",
+ "Household items context",
+ ),
+ (
+ script_dir / "../embeddings/dataframes/locations.csv",
+ "Household locations context",
+ ),
+ (
+ script_dir / "../embeddings/dataframes/categories.csv",
+ "Household categories context",
+ ),
+ (
+ script_dir / "../embeddings/dataframes/actions.csv",
+ "Household actions context",
+ ),
+ ]
+
+ collections = {}
+ for file, context in dataframes:
+ # Get the absolute path of the file and create a pandas data frame
+ file = file.resolve()
+ df = pd.read_csv(file)
+
+ # Check if the csv has a name column
+ if "name" not in df.columns:
+ raise ValueError(f"The 'name' column is missing in {file}")
+
+ # Add context to each row
+ df["name"] = df["name"].apply(lambda x: f"{x} {context}")
+
+ # Check if the collection already exists and has the correct name format
+ collection_name = file.stem
+ collection_name = self._sanitize_collection_name(collection_name)
+ collections[collection_name] = self._get_or_create_collection(
+ collection_name
+ )
+
+ # There is metadata
+ if "metadata" in df.columns:
+ # Add the entries in the collection
+ collections[collection_name].add(
+ documents=df["name"].tolist(),
+ metadatas=[
+ json.loads(row["metadata"])
+ if "metadata" in row and row["metadata"]
+ else {}
+ for _, row in df.iterrows()
+ ],
+ ids=[str(uuid4()) for _ in range(len(df))],
+ )
+ # There is no metadata
+ else:
+ collections[collection_name].add(
+ documents=df["name"].tolist(),
+ ids=[str(uuid4()) for _ in range(len(df))],
+ )
+ return
+
+ def remove_collection(self, collection_name):
+ """Method to remove a collection"""
+ try:
+ self.client.delete_collection(collection_name)
+ except Exception:
+ raise ValueError(f"The collection is missing {collection_name}")
+ return
+
+ def _get_or_create_collection(self, collection_name):
+ """Helper method to get or create a collection"""
+ try:
+ return self.client.get_or_create_collection(name=collection_name)
+ except Exception:
+ raise ValueError(f"The collection is missing {collection_name}")
+
+ def get_entry_by_id(self, collection_name, id_):
+ """Method to get an entry by id"""
+ try:
+ collection_ = self.client.get_collection(name=collection_name)
+ except Exception:
+ raise ValueError(f"The collection is missing {collection_name}")
+ return collection_.get(ids=id_, include=["metadatas", "documents"])
+
+ def get_entry_by_metadata(self, collection_name, metadata):
+ """Method to get an entry by metadata"""
+ try:
+ collection_ = self.client.get_collection(name=collection_name)
+ except Exception:
+ raise ValueError(f"The collection is missing {collection_name}")
+
+ return collection_.get(where=metadata, include=["metadatas", "documents"])
+
+ def get_entry_by_document(self, collection_name, document):
+ """Method to get an entry by document"""
+ try:
+ collection_ = self.client.get_collection(name=collection_name)
+ except Exception:
+ raise ValueError(f"The collection is missing {collection_name}")
+
+ return collection_.get(
+ where_document={"$contains": document}, include=["metadatas", "documents"]
+ )
+
+ def add_entry(self, collection_name, document):
+ """Method to add an entry to a collection"""
+ try:
+ collection_ = self.client.get_collection(name=collection_name)
+ except Exception:
+ raise ValueError(f"The collection is missing {collection_name}")
+
+ return collection_.add(ids=str(uuid4()), documents=[document])
+
+ def add_entry_with_metadata(self, collection_name, document, metadata):
+ """Method to add an entry with metadata to a collection"""
+ try:
+ collection_ = self.client.get_collection(name=collection_name)
+ except Exception:
+ raise ValueError(f"The collection is missing {collection_name}")
+
+ return collection_.add(
+ ids=str(uuid4()), documents=[document], metadatas=[metadata]
+ )
+
+ def query(self, collection_name, query, top_k):
+ """Method to query the collection"""
+ try:
+ collection_ = self.client.get_collection(name=collection_name)
+ except Exception:
+ raise ValueError(f"Does not exist collection: {collection_name}")
+
+ return collection_.query(
+ query_texts=query,
+ n_results=top_k,
+ include=["metadatas", "documents", "distances"],
+ )
+
+ def _sanitize_collection_name(self, collection):
+ """Ensures collection name is a valid string due to the constraints of the ChromaDB API https://docs.trychroma.com/docs/collections/create-get-delete"""
+ collection = str(collection).strip()
+
+ if not (
+ 3 <= len(collection) <= 63
+ and collection.replace("-", "").replace("_", "").isalnum()
+ ):
+ raise ValueError(f"Invalid collection name: {collection}")
+
+ return collection
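+
+
+if __name__ == "__main__":
+    # Small illustrative check (assumes a ChromaDB server is already reachable on
+    # localhost:8000, as configured in __init__): list the existing collections.
+    client = ChromaClient()
+    print(client.list_collections())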
diff --git a/hri/packages/embeddings/scripts/item_categorization.py b/hri/packages/embeddings/scripts/item_categorization.py
index a0734c79..01ce433c 100755
--- a/hri/packages/embeddings/scripts/item_categorization.py
+++ b/hri/packages/embeddings/scripts/item_categorization.py
@@ -1,50 +1,41 @@
#!/usr/bin/env python3
-from pathlib import Path
-
-import chromadb
-import pandas as pd
import rclpy
-from chromadb.utils import embedding_functions
from rclpy.node import Node
-from rclpy.parameter import Parameter
+from pydantic import BaseModel, ValidationError
+from typing import Optional
+from frida_interfaces.srv import AddItem, BuildEmbeddings, QueryItem
+
+# ChromaClient wraps the Chroma HTTP client and embedding functions
+from ChromaClient import ChromaClient
+
-from frida_interfaces.srv import (
- AddItem,
- BuildEmbeddings,
- QueryItem,
- RemoveItem,
- UpdateItem,
-)
+# Pydantic model for validating item metadata
+class MetadataModel(BaseModel):
+    shelve: Optional[str] = None
+    category: Optional[str] = None
+    timestamp: Optional[str] = None
+ # other metadata fields can be defined here
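+    # Illustrative payload accepted by this model (field values are examples only):
+    # '{"shelve": "kitchen", "category": "drinks", "timestamp": "2025-01-01T12:00:00"}'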
class Embeddings(Node):
def __init__(self):
super().__init__("embeddings")
self.get_logger().info("Initializing item_categorization.")
- # Declare parameters for the sentence transformer model and collections built flag
+
+ # Declare parameters
self.declare_parameter("Embeddings_model", "all-MiniLM-L12-v2")
- self.declare_parameter("collections_built", 0) # Default: 0 (not built)
+ self.declare_parameter("collections_built", 0)
# Parameters for services
self.declare_parameter("ADD_ITEM_SERVICE", "add_item")
- self.declare_parameter("REMOVE_ITEM_SERVICE", "remove_item")
- self.declare_parameter("UPDATE_ITEM_SERVICE", "update_item")
self.declare_parameter("QUERY_ITEM_SERVICE", "query_item")
self.declare_parameter("BUILD_EMBEDDINGS_SERVICE", "build_embeddings")
# Resolve parameters
- model_name_ = (
- self.get_parameter("Embeddings_model").get_parameter_value().string_value
- )
+
add_item_service = (
self.get_parameter("ADD_ITEM_SERVICE").get_parameter_value().string_value
)
- remove_item_service = (
- self.get_parameter("REMOVE_ITEM_SERVICE").get_parameter_value().string_value
- )
- update_item_service = (
- self.get_parameter("UPDATE_ITEM_SERVICE").get_parameter_value().string_value
- )
query_item_service = (
self.get_parameter("QUERY_ITEM_SERVICE").get_parameter_value().string_value
)
@@ -53,244 +44,86 @@ def __init__(self):
.get_parameter_value()
.string_value
)
+        # Initialize the ChromaClient wrapper (handles the Chroma client and embedding functions)
+ self.chroma_adapter = ChromaClient()
- # Create the BuildEmbeddings service
+ # Create services
self.build_embeddings_service = self.create_service(
BuildEmbeddings, build_embeddings_service, self.build_embeddings_callback
)
-
- # Initialize ChromaDB client
- self.chroma_client = chromadb.HttpClient(host="localhost", port=8000)
-
- # Configure the embedding function
- self.sentence_transformer_ef = (
- embedding_functions.SentenceTransformerEmbeddingFunction(
- model_name=model_name_
- )
- )
-
- # Check if collections are built or need to be built
- self.check_and_update_collections()
-
- # Initialize services
self.add_item_service = self.create_service(
AddItem, add_item_service, self.add_item_callback
)
-
- self.remove_item_service = self.create_service(
- RemoveItem, remove_item_service, self.remove_item_callback
- )
-
- self.update_item_service = self.create_service(
- UpdateItem, update_item_service, self.update_item_callback
- )
self.query_item_service = self.create_service(
QueryItem, query_item_service, self.query_item_callback
)
-
self.get_logger().info("item_categorization initialized.")
- def check_and_update_collections(self):
- """Check if collections exist and call the method to build them if missing."""
- collections = [
- "items",
- "locations",
- "categories",
- "actions",
- ] # List of expected collections
- collections_built = 1 # Assume collections are built unless proven otherwise
-
- for collection_name in collections:
- try:
- # Check if the collection exists
- collection = self.chroma_client.get_collection(name=collection_name)
- if collection is None:
- collections_built = (
- 0 # If any collection is missing, set to 0 (not built)
- )
- break # No need to check further
- except Exception:
- collections_built = (
- 0 # If any error occurs (collection doesn't exist), set to 0
- )
- break # No need to check further
-
- # Update the ROS parameter based on whether collections are built
- self.set_parameters([Parameter("collections_built", value=collections_built)])
- if collections_built:
- self.get_logger().info("Collections already built, skipping build.")
- else:
- self.get_logger().info(
- "Collections not found, proceeding to build collections."
- )
- self.build_embeddings_callback() # Build the collections if not built
-
def add_item_callback(self, request, response):
"""Service callback to add items to ChromaDB"""
try:
- collection_name = self._sanitize_collection_name(request.collection)
- collection = self._get_or_create_collection(collection_name)
-
- collection.add(
- documents=request.document,
- metadatas=[{"text": doc} for doc in request.document],
- ids=request.id,
- )
-
- response.success = True
- response.message = "Items added successfully"
- except Exception as e:
- response.success = False
- response.message = f"Failed to add items: {str(e)}"
- self.get_logger().error(response.message)
- return response
-
- def remove_item_callback(self, request, response):
- """Service callback to remove items from ChromaDB"""
- try:
- collection_name = self._sanitize_collection_name(request.collection)
- collection = self._get_or_create_collection(collection_name)
-
- self.get_logger().info(
- f"Removing items: {request.item_id} from collection: {collection_name}"
- )
- collection.delete(ids=request.item_id)
-
- response.success = True
- response.message = "Items removed successfully"
- except Exception as e:
- response.success = False
- response.message = f"Failed to remove items: {str(e)}"
- self.get_logger().error(response.message)
- return response
-
- def update_item_callback(self, request, response):
- """Service callback to update items in ChromaDB"""
- try:
- collection_name = self._sanitize_collection_name(request.collection)
- collection = self._get_or_create_collection(collection_name)
-
- for item_id, field, new_data in zip(
- request.item_id, request.field, request.new_data
- ):
- self.get_logger().info(
- f"Updating {field} for item_id={item_id} with new_data={new_data}"
+ # Check if metadata is provided and if it's not empty
+ if request.metadata.strip():
+ # If metadata is provided and not empty, validate and parse it
+ metadata_parsed = MetadataModel.model_validate_json(request.metadata)
+ metadata_parsed = metadata_parsed.model_dump()
+                self.chroma_adapter.add_entry_with_metadata(
+ request.collection, request.document, metadata_parsed
)
-
- update_args = {"ids": [item_id]}
- if field == "document":
- update_args["documents"] = [new_data]
- elif field == "metadata":
- update_args["metadatas"] = [{"text": new_data}]
-
- collection.update(**update_args)
-
+            else:
+                # Metadata is empty or whitespace-only; add the document without metadata
+                self.chroma_adapter.add_entry(request.collection, request.document)
response.success = True
- response.message = "Items updated successfully"
+ response.message = "Item added successfully"
+ except ValidationError as e:
+ response.success = False
+ response.message = f"Invalid metadata: {str(e)}"
except Exception as e:
response.success = False
- response.message = f"Failed to update items: {str(e)}"
+ response.message = f"Failed to add item: {str(e)}"
self.get_logger().error(response.message)
return response
def query_item_callback(self, request, response):
"""Service callback to query items from ChromaDB"""
try:
- collection_name = self._sanitize_collection_name(request.collection)
- collection = self._get_or_create_collection(collection_name)
-
- results = collection.query(
- query_texts=[request.query], n_results=request.topk
+            # Delegate the query to ChromaClient, which handles the ChromaDB interaction
+ results = self.chroma_adapter.query(
+ request.collection, request.query, request.topk
)
- # Ensure results exist before accessing
response.results = [str(doc) for doc in results.get("documents", [])]
response.success = True if response.results else False
response.message = (
"Query successful" if response.results else "No matching items found"
)
+ response.metadata = [str(doc) for doc in results.get("metadatas", [])]
+
except Exception as e:
response.success = False
response.message = f"Failed to query items: {str(e)}"
self.get_logger().error(response.message)
return response
- def build_embeddings_callback(self):
+ def build_embeddings_callback(self, request, response):
"""Method to build embeddings for the household items data"""
- script_dir = Path(__file__).resolve().parent
- dataframes = [
- (
- script_dir / "../embeddings/dataframes/items.csv",
- "Household items context",
- ),
- (
- script_dir / "../embeddings/dataframes/locations.csv",
- "Household locations context",
- ),
- (
- script_dir / "../embeddings/dataframes/categories.csv",
- "Household categories context",
- ),
- (
- script_dir / "../embeddings/dataframes/actions.csv",
- "Household actions context",
- ),
- ]
- collections = {}
- for file, context in dataframes:
- file = file.resolve() # Ensure the path is absolute
- df = pd.read_csv(file)
-
- if "name" not in df.columns:
- raise ValueError(f"The 'name' column is missing in {file}")
-
- df["name"] = df["name"].apply(lambda x: f"{x} {context}")
- collection_name = file.stem
- collections[collection_name] = self._get_or_create_collection(
- collection_name
- )
-
- collections[collection_name].add(
- documents=df["name"].tolist(),
- metadatas=[
- {
- "text": row["name"],
- "original_name": row["name"].split(" ")[0],
- **row.to_dict(),
- }
- for _, row in df.iterrows()
- ],
- ids=[f"{collection_name}_{i}" for i in range(len(df))],
- )
-
- self.get_logger().info("Build request received and handled successfully")
-
- def _sanitize_collection_name(self, collection):
- """Ensures collection name is a valid string"""
- if isinstance(collection, list):
- collection = collection[0] # Extract from list if necessary
-
- collection = str(collection).strip()
-
- if not (
- 3 <= len(collection) <= 63
- and collection.replace("-", "").replace("_", "").isalnum()
- ):
- self.get_logger().error(f"Invalid collection name: {collection}")
- raise ValueError(f"Invalid collection name: {collection}")
-
- return collection
-
- def _get_or_create_collection(self, collection_name):
- """Helper method to get or create a collection"""
try:
- return self.chroma_client.get_or_create_collection(name=collection_name)
+            # Delegate the embedding build to ChromaClient
+            self.chroma_adapter.build_embeddings_callback()
+ response.success = True
+ response.message = "Embeddings built successfully"
+ self.get_logger().info("Build request handled successfully")
+
except Exception as e:
- self.get_logger().error(
- f"Error retrieving collection '{collection_name}': {str(e)}"
- )
- raise
+ response.success = False
+ response.message = f"Error while building embeddings: {str(e)}"
+ self.get_logger().error(f"Error while building embeddings: {str(e)}")
+
+ return response
def main():
diff --git a/hri/packages/nlp/assets/download-model.bash b/hri/packages/nlp/assets/download-model.bash
new file mode 100644
index 00000000..6859ac3d
--- /dev/null
+++ b/hri/packages/nlp/assets/download-model.bash
@@ -0,0 +1,2 @@
+curl -L https://huggingface.co/diegohc/robollm/resolve/main/rbrgs-finetuned-unsloth.F16.gguf -o rbrgs-finetuned.F16.gguf
+curl -L https://huggingface.co/diegohc/robollm/resolve/main/Modelfile -o Modelfile
\ No newline at end of file
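+# The files land in hri/packages/nlp/assets, which ollama.yaml mounts at /ollama.
+# One way to register the model with the running server (assuming the container name
+# from ollama.yaml and that the Modelfile references the downloaded weights):
+#   docker exec -it home2-hri-ollama ollama create rbrgs-finetuned -f /ollama/Modelfile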
diff --git a/hri/packages/speech/scripts/stt/Faster-whisper.py b/hri/packages/speech/scripts/stt/faster-whisper-l4t.py
similarity index 98%
rename from hri/packages/speech/scripts/stt/Faster-whisper.py
rename to hri/packages/speech/scripts/stt/faster-whisper-l4t.py
index ee15d5cd..1ec005f0 100644
--- a/hri/packages/speech/scripts/stt/Faster-whisper.py
+++ b/hri/packages/speech/scripts/stt/faster-whisper-l4t.py
@@ -23,6 +23,7 @@ def __init__(self, model_size):
def load_model(self):
model_directory = os.path.join(os.path.dirname(__file__), "models")
device = "cuda" if torch.cuda.is_available() else "cpu"
+ print(f"running with {device}")
return WhisperModel(
self.model_size,
download_root=model_directory,
diff --git a/hri/packages/speech/scripts/stt/faster-whisper.py b/hri/packages/speech/scripts/stt/faster-whisper.py
new file mode 100644
index 00000000..1fee69db
--- /dev/null
+++ b/hri/packages/speech/scripts/stt/faster-whisper.py
@@ -0,0 +1,85 @@
+import argparse
+import os
+import sys
+from concurrent import futures
+
+import grpc
+import speech_pb2
+import speech_pb2_grpc
+from faster_whisper import WhisperModel
+
+# Add the speech package directory (provides wav_utils) to the Python path
+sys.path.append(os.path.join(os.path.dirname(__file__), "speech"))
+
+from wav_utils import WavUtils
+
+
+class WhisperServicer(speech_pb2_grpc.SpeechServiceServicer):
+ def __init__(self, model_size):
+ self.model_size = model_size
+ self.audio_model = self.load_model()
+
+ def load_model(self):
+ model_directory = os.path.join(os.path.dirname(__file__), "models")
+ return WhisperModel(
+ self.model_size,
+ download_root=model_directory,
+ device="cpu",
+ compute_type="float32",
+ )
+
+ def Transcribe(self, request, context):
+ print("Received audio data, transcribing...")
+
+ # Generate a temporary WAV file from received audio data
+ temp_file = WavUtils.generate_temp_wav(1, 2, 16000, request.audio_data)
+
+ # Perform transcription
+ result = self.audio_model.transcribe(
+ temp_file,
+ language="en",
+ hotwords="Frida kitchen attendance RoBorregos",
+ condition_on_previous_text=True,
+ )
+
+ WavUtils.discard_wav(temp_file)
+
+ # Access the generator to collect text segments
+ segments = result[0] # The generator
+ transcription = "".join(
+ [segment.text for segment in segments]
+ ) # Collect all text
+
+ print(f"Transcription: {transcription}")
+
+ # Return the transcribed text
+ return speech_pb2.TextResponse(text=transcription)
+
+
+def serve(port, model_size):
+ # Create the gRPC server
+ server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
+ speech_pb2_grpc.add_SpeechServiceServicer_to_server(
+ WhisperServicer(model_size), server
+ )
+
+ # Bind to a port
+ server.add_insecure_port(f"0.0.0.0:{port}")
+ print(f"Whisper gRPC server is running on port {port}...")
+ server.start()
+ server.wait_for_termination()
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Whisper gRPC server")
+ parser.add_argument(
+ "--port", type=int, default=50051, help="Port to run the gRPC server on"
+ )
+ parser.add_argument(
+ "--model_size",
+ type=str,
+ default="base.en",
+ help="Model size to use (base.en, large.en, or small.en)",
+ )
+ args = parser.parse_args()
+ serve(args.port, args.model_size)
diff --git a/hri/packages/speech/scripts/stt/requirements.txt b/hri/packages/speech/scripts/stt/requirements.txt
index 9e9c6bd1..8fb8e0b5 100644
--- a/hri/packages/speech/scripts/stt/requirements.txt
+++ b/hri/packages/speech/scripts/stt/requirements.txt
@@ -1,8 +1,6 @@
-grpcio
-grpcio-tools
-torch
-torchaudio
-faster-whisper
-pydub
-soundfile
-sounddevice
\ No newline at end of file
+grpcio==1.70.0
+grpcio-tools==1.70.0
+faster-whisper==1.1.1
+pydub==0.25.1
+soundfile==0.13.1
+sounddevice==0.5.1
\ No newline at end of file
diff --git a/hri/packages/speech/scripts/stt/speech_pb2.py b/hri/packages/speech/scripts/stt/speech_pb2.py
index 20f01270..42a0328e 100644
--- a/hri/packages/speech/scripts/stt/speech_pb2.py
+++ b/hri/packages/speech/scripts/stt/speech_pb2.py
@@ -2,7 +2,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: speech.proto
-# Protobuf Python Version: 5.28.1
+# Protobuf Python Version: 5.29.0
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -12,8 +12,8 @@
_runtime_version.ValidateProtobufRuntimeVersion(
_runtime_version.Domain.PUBLIC,
5,
- 28,
- 1,
+ 29,
+ 0,
'',
'speech.proto'
)
diff --git a/hri/packages/speech/scripts/stt/speech_pb2_grpc.py b/hri/packages/speech/scripts/stt/speech_pb2_grpc.py
index 90fe5ba4..087c8b56 100644
--- a/hri/packages/speech/scripts/stt/speech_pb2_grpc.py
+++ b/hri/packages/speech/scripts/stt/speech_pb2_grpc.py
@@ -5,7 +5,7 @@
import speech_pb2 as speech__pb2
-GRPC_GENERATED_VERSION = '1.68.1'
+GRPC_GENERATED_VERSION = '1.70.0'
GRPC_VERSION = grpc.__version__
_version_not_supported = False
diff --git a/hri/requirements/speech.txt b/hri/requirements/speech.txt
index 3f2922fe..c676f6ce 100644
--- a/hri/requirements/speech.txt
+++ b/hri/requirements/speech.txt
@@ -22,5 +22,6 @@ piper-tts==1.2.0
piper==0.14.4
# gRPC
-grpcio==1.68.1
-grpcio-tools==1.68.1
\ No newline at end of file
+grpcio==1.70.0
+grpcio-tools==1.70.0
+protobuf==5.29.0
\ No newline at end of file
diff --git a/manipulation/packages/frida_motion_planning/CMakeLists.txt b/manipulation/packages/frida_motion_planning/CMakeLists.txt
index 9717232e..8a24aaee 100644
--- a/manipulation/packages/frida_motion_planning/CMakeLists.txt
+++ b/manipulation/packages/frida_motion_planning/CMakeLists.txt
@@ -15,12 +15,18 @@ ament_python_install_package(frida_motion_planning)
# Install examples
set(EXAMPLES_DIR examples)
set(SCRIPTS_DIR frida_motion_planning)
-
+
+
+file(GLOB_RECURSE example_files RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" "${EXAMPLES_DIR}/*.py")
+
install(PROGRAMS
- ${EXAMPLES_DIR}/call_joint_goal.py
- ${EXAMPLES_DIR}/call_pose_goal.py
+ ${example_files}
+ ${EXAMPLES_DIR}/ds4_demo.py
${SCRIPTS_DIR}/motion_planning_server.py
DESTINATION lib/${PROJECT_NAME}
)
+install(DIRECTORY launch
+ DESTINATION share/${PROJECT_NAME})
+
ament_package()
diff --git a/manipulation/packages/frida_motion_planning/examples/add_collision_object.py b/manipulation/packages/frida_motion_planning/examples/add_collision_object.py
new file mode 100755
index 00000000..164c3770
--- /dev/null
+++ b/manipulation/packages/frida_motion_planning/examples/add_collision_object.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python3
+
+import rclpy
+from rclpy.node import Node
+from frida_interfaces.srv import AddCollisionObject
+from geometry_msgs.msg import Quaternion
+from std_msgs.msg import Header
+
+"""
+Usage
+- ros2 run frida_motion_planning add_collision_object.py --ros-args -p type:="box" -p position:="[0.5, 0.0, 0.5]" -p dimensions:="[0.2, 0.2, 0.05]"
+- ros2 run frida_motion_planning add_collision_object.py --ros-args -p type:="cylinder" -p position:="[0.2, 0.0, 0.5]" -p quat_xyzw:="[0.0, 0.0, 0.0, 1.0]" -p dimensions:="[0.1, 0.4]"
+- ros2 run frida_motion_planning add_collision_object.py --ros-args -p type:="sphere -p position:="[0.2, 0.0, 0.5]" -p dimensions:="[0.15]"
+- ros2 run frida_motion_planning add_collision_object.py --ros-args -p type:="mesh" -p position:="[0.2, 0.0, 0.5]" -p quat_xyzw:="[0.0, 0.0, 0.0, 1.0]" -p mesh_file:="package://frida_motion_planning/meshes/box.stl"
+"""
+
+
+def main():
+ rclpy.init()
+
+ node = Node("ex_collision_primitive")
+ node.declare_parameter("type", "box")
+ node.declare_parameter("position", [0.5, 0.0, 0.5])
+ node.declare_parameter("quat_xyzw", [0.0, 0.0, 0.0, 1.0])
+ node.declare_parameter("dimensions", [0.1, 0.1, 0.1])
+ node.declare_parameter("mesh_file", "")
+
+ add_collision_object_client = node.create_client(
+ AddCollisionObject, "/manipulation/add_collision_object"
+ )
+ while not add_collision_object_client.wait_for_service(timeout_sec=1.0):
+ node.get_logger().info("Service not available, waiting again...")
+
+ type = node.get_parameter("type").get_parameter_value().string_value
+ position = node.get_parameter("position").get_parameter_value().double_array_value
+ quat_xyzw = node.get_parameter("quat_xyzw").get_parameter_value().double_array_value
+ dimensions = (
+ node.get_parameter("dimensions").get_parameter_value().double_array_value
+ )
+ mesh_file = node.get_parameter("mesh_file").get_parameter_value().string_value
+
+ print(f"Sending type: {type}")
+ print(f"Sending position: {position}")
+ print(f"Sending quat_xyzw: {quat_xyzw}")
+ print(f"Sending dimensions: {dimensions}")
+ request = AddCollisionObject.Request()
+ request.id = "object"
+ request.type = type
+ request.pose.header = Header()
+ request.pose.header.frame_id = "link_base"
+ request.pose.header.stamp = node.get_clock().now().to_msg()
+ request.pose.pose.position.x = float(position[0])
+ request.pose.pose.position.y = float(position[1])
+ request.pose.pose.position.z = float(position[2])
+ if request.type == "box":
+ request.dimensions.x = float(dimensions[0])
+ request.dimensions.y = float(dimensions[1])
+ request.dimensions.z = float(dimensions[2])
+ elif request.type == "cylinder":
+ request.dimensions.x = float(dimensions[0])
+ request.dimensions.z = float(dimensions[1])
+ else:
+ request.dimensions.x = float(dimensions[0])
+ request.pose.pose.orientation = Quaternion()
+ request.pose.pose.orientation.x = float(quat_xyzw[0])
+ request.pose.pose.orientation.y = float(quat_xyzw[1])
+ request.pose.pose.orientation.z = float(quat_xyzw[2])
+ request.pose.pose.orientation.w = float(quat_xyzw[3])
+ if request.type == "mesh":
+ request.path_to_mesh = mesh_file
+
+ future = add_collision_object_client.call_async(request)
+ rclpy.spin_until_future_complete(node, future)
+ response = future.result()
+
+ print(f"Response: {response.success}")
+
+ node.destroy_node()
+ rclpy.shutdown()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/manipulation/packages/frida_motion_planning/examples/ds4_demo.py b/manipulation/packages/frida_motion_planning/examples/ds4_demo.py
new file mode 100755
index 00000000..3e450f33
--- /dev/null
+++ b/manipulation/packages/frida_motion_planning/examples/ds4_demo.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python3
+
+from sensor_msgs.msg import Joy
+
+from rclpy.node import Node
+from rclpy.qos import QoSProfile
+from rclpy.qos import QoSReliabilityPolicy
+from rclpy.qos import QoSHistoryPolicy
+from rclpy.qos import QoSDurabilityPolicy
+from rclpy.action import ActionClient
+from frida_interfaces.action import MoveJoints
+from geometry_msgs.msg import TwistStamped
+from xarm_msgs.srv import SetDigitalIO
+import rclpy
+
+DEG2RAD = 3.14159265359 / 180.0
+
+POS0 = [-90, -45, -90, -170, -45, -55]
+POS1 = [30, -10, -40, -170, 45, -55]
+
+POS0 = [x * DEG2RAD for x in POS0]
+POS1 = [x * DEG2RAD for x in POS1]
+
+
+class ServoDS4(Node):
+ def __init__(self):
+ super().__init__("servo_ds4")
+ self.create_subscription(Joy, "/joy", self.joy_callback, 1)
+ print("Initializing...")
+ # Create a client for the service
+ self.gripper_client = self.create_client(
+ SetDigitalIO, "/xarm/set_tgpio_digital"
+ )
+ self._move_joints_action_client = ActionClient(
+ self, MoveJoints, "/manipulation/move_joints_action_server"
+ )
+ self.twist_pub = self.create_publisher(
+ TwistStamped,
+ "/servo_server/delta_twist_cmds",
+ QoSProfile(
+ durability=QoSDurabilityPolicy.VOLATILE,
+ reliability=QoSReliabilityPolicy.RELIABLE,
+ history=QoSHistoryPolicy.KEEP_ALL,
+ ),
+ )
+
+ # self.start_service = self.create_client(ToggleServo, "/manipulation/toggle_servo")
+ # self.start_service.wait_for_service()
+ # future = self.start_service.call_async(ToggleServo.Request(enable=True))
+ # rclpy.spin_until_future_complete(self, future)
+ self.busy_planner = False
+ self.busy_gripper = False
+ print("Initialized")
+
+ def joy_callback(self, msg):
+ if msg.buttons[0]:
+ print("Setting gripper state open")
+ self.set_gripper_state("open")
+ elif msg.buttons[2]:
+ print("Setting gripper state close")
+ self.set_gripper_state("close")
+ elif msg.buttons[1]:
+ print("Setting joint state 0")
+ self.send_goal(joint_positions=POS0)
+ elif msg.buttons[3]:
+ print("Setting joint state 1")
+ self.send_goal(joint_positions=POS1)
+
+ def set_gripper_state(self, state):
+ if self.busy_gripper:
+ return
+ request = SetDigitalIO.Request()
+ request.ionum = int(0)
+ request.value = int(0) if state == "open" else int(1)
+
+ future = self.gripper_client.call_async(request)
+ self.busy_gripper = True
+ future.add_done_callback(self.gripper_callback)
+
+ # let the server pick the default values
+ def send_goal(
+ self,
+ joint_names=[],
+ joint_positions=[],
+ velocity=0.2,
+ acceleration=0.0,
+ planner_id="",
+ ):
+ if self.busy_planner:
+ return
+ goal_msg = MoveJoints.Goal()
+ goal_msg.joint_names = joint_names
+ goal_msg.joint_positions = joint_positions
+ goal_msg.velocity = velocity
+ goal_msg.acceleration = acceleration
+ goal_msg.planner_id = planner_id
+
+ self.get_logger().info("Sending joint goal...")
+ future = self._move_joints_action_client.send_goal_async(goal_msg)
+ future.add_done_callback(self.goal_response_callback)
+ self.busy_planner = True
+
+ def gripper_callback(self, future):
+ self.busy_gripper = False
+
+ def goal_response_callback(self, future):
+ goal_handle = future.result()
+ if not goal_handle.accepted:
+ self.get_logger().info("Goal rejected :(")
+ return
+
+ self.get_logger().info("Goal accepted :)")
+
+ self._get_result_future = goal_handle.get_result_async()
+ self._get_result_future.add_done_callback(self.get_result_callback)
+
+ def get_result_callback(self, future):
+ result = future.result().result
+ print("Result:", result)
+ self.busy_planner = False
+
+
+def main(args=None):
+ rclpy.init(args=args)
+ node = ServoDS4()
+ rclpy.spin(node)
+ rclpy.shutdown()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/manipulation/packages/frida_motion_planning/examples/remove_collision_object.py b/manipulation/packages/frida_motion_planning/examples/remove_collision_object.py
new file mode 100755
index 00000000..abc37312
--- /dev/null
+++ b/manipulation/packages/frida_motion_planning/examples/remove_collision_object.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+
+import rclpy
+from rclpy.node import Node
+from frida_interfaces.srv import RemoveCollisionObject
+
+"""
+Usage
+- ros2 run frida_motion_planning add_collision_object.py --ros-args -p id:="object"
+"""
+
+
+def main():
+ rclpy.init()
+
+ node = Node("ex_collision_primitive")
+ node.declare_parameter("id", "object")
+
+ remove_collision_object_client = node.create_client(
+ RemoveCollisionObject, "/manipulation/remove_collision_object"
+ )
+ while not remove_collision_object_client.wait_for_service(timeout_sec=1.0):
+ node.get_logger().info("Service not available, waiting again...")
+
+ id = node.get_parameter("id").get_parameter_value().string_value
+
+ request = RemoveCollisionObject.Request()
+ request.id = id
+
+ print(f"Sending id: {id}")
+
+ future = remove_collision_object_client.call_async(request)
+ rclpy.spin_until_future_complete(node, future)
+ response = future.result()
+
+ print(f"Response: {response.success}")
+
+ node.destroy_node()
+ rclpy.shutdown()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/manipulation/packages/frida_motion_planning/frida_motion_planning/motion_planning_server.py b/manipulation/packages/frida_motion_planning/frida_motion_planning/motion_planning_server.py
index 19434fb2..c3ff730b 100755
--- a/manipulation/packages/frida_motion_planning/frida_motion_planning/motion_planning_server.py
+++ b/manipulation/packages/frida_motion_planning/frida_motion_planning/motion_planning_server.py
@@ -4,9 +4,16 @@
from rclpy.node import Node
from rclpy.action import ActionServer
from rclpy.callback_groups import ReentrantCallbackGroup
+from geometry_msgs.msg import TwistStamped
from frida_interfaces.action import MoveToPose, MoveJoints
-from frida_interfaces.srv import GetJoints
+from frida_interfaces.srv import (
+ GetJoints,
+ AddCollisionObject,
+ RemoveCollisionObject,
+ ToggleServo,
+)
from frida_motion_planning.utils.MoveItPlanner import MoveItPlanner
+from frida_motion_planning.utils.MoveItServo import MoveItServo
class MotionPlanningServer(Node):
@@ -14,10 +21,23 @@ def __init__(self):
super().__init__("pick_server")
self.callback_group = ReentrantCallbackGroup()
+ # Here we can select other planner (if implemented)
+ self.planner = MoveItPlanner(self, self.callback_group)
+ self.planner.set_velocity(0.15)
+ self.planner.set_acceleration(0.15)
+ self.planner.set_planner("RRTConnect")
+
+ self.servo = MoveItServo(
+ self,
+ self.callback_group,
+ max_velocity=0.1,
+ max_acceleration=0.1,
+ )
+
self._move_to_pose_server = ActionServer(
self,
MoveToPose,
- "move_to_pose_action_server",
+ "/manipulation/move_to_pose_action_server",
self.move_to_pose_execute_callback,
callback_group=self.callback_group,
)
@@ -25,20 +45,39 @@ def __init__(self):
self._move_joints_server = ActionServer(
self,
MoveJoints,
- "move_joints_action_server",
+ "/manipulation/move_joints_action_server",
self.move_joints_execute_callback,
callback_group=self.callback_group,
)
self.get_joints_service = self.create_service(
- GetJoints, "get_joints", self.get_joints_callback
+ GetJoints, "/manipulation/get_joints", self.get_joints_callback
+ )
+
+ self.add_collision_object_service = self.create_service(
+ AddCollisionObject,
+ "/manipulation/add_collision_object",
+ self.add_collision_object_callback,
+ )
+
+ self.remove_collision_object_service = self.create_service(
+ RemoveCollisionObject,
+ "/manipulation/remove_collision_object",
+ self.remove_collision_object_callback,
+ )
+
+ self.toggle_servo_service = self.create_service(
+ ToggleServo, "/manipulation/toggle_servo", self.toggle_servo_callback
+ )
+
+ self.servo_speed_subscriber = self.create_subscription(
+ TwistStamped,
+ "/manipulation/servo_speed",
+ self.servo_speed_callback,
+ 10,
+ callback_group=self.callback_group,
)
- # Here we can select other planner (if implemented)
- self.planner = MoveItPlanner(self, self.callback_group)
- self.planner.set_velocity(0.15)
- self.planner.set_acceleration(0.15)
- self.planner.set_planner("RRTConnect")
self.get_logger().info("Pick Action Server has been started")
async def move_to_pose_execute_callback(self, goal_handle):
@@ -131,6 +170,138 @@ def get_joints_callback(self, request, response):
response.joint_names.append(joint_name)
return response
+ def add_collision_object_callback(self, request, response):
+ """Handle requests to add collision objects to the planning scene"""
+ try:
+ # Generate a unique ID for the collision object
+ object_id = f"{request.id}"
+
+ # Handle different collision object types
+ if request.type == "box":
+ self.planner.add_collision_box(
+ id=object_id,
+ size=(
+ request.dimensions.x,
+ request.dimensions.y,
+ request.dimensions.z,
+ ),
+ pose=request.pose,
+ )
+ self.get_logger().info(f"Added collision box: {object_id}")
+
+ elif request.type == "sphere":
+ # For spheres, use the x component of dimensions as radius
+ self.planner.add_collision_sphere(
+ id=object_id, radius=request.dimensions.x, pose=request.pose
+ )
+ self.get_logger().info(f"Added collision sphere: {object_id}")
+
+ elif request.type == "cylinder":
+ # For cylinders, use x as radius, z as height
+ self.planner.add_collision_cylinder(
+ id=object_id,
+ height=request.dimensions.z,
+ radius=request.dimensions.x,
+ pose=request.pose,
+ )
+ self.get_logger().info(f"Added collision cylinder: {object_id}")
+
+ elif request.type == "mesh":
+ # Add collision mesh from file path -> Priority to file path
+ if request.path_to_mesh:
+ self.planner.add_collision_mesh(
+ id=object_id,
+ filepath=request.path_to_mesh,
+ pose=request.pose,
+ scale=(
+ request.dimensions.x if request.dimensions.x != 0.0 else 1.0
+ ),
+ )
+ self.get_logger().info(
+ f"Added collision mesh from file: {object_id}"
+ )
+ # Or from mesh data
+ else:
+ import trimesh
+
+ # Convert mesh data to trimesh object
+ mesh = trimesh.Trimesh()
+ mesh.vertices = [(v.x, v.y, v.z) for v in request.mesh.vertices]
+ mesh.faces = [
+ (t.vertex_indices[0], t.vertex_indices[1], t.vertex_indices[2])
+ for t in request.mesh.triangles
+ ]
+
+                    # Pass the reconstructed mesh to the planner in the pose's frame
+ self.planner.add_collision_mesh(
+ id=object_id,
+ filepath=None,
+ pose=request.pose,
+ mesh=mesh,
+ frame_id=request.pose.header.frame_id,
+ )
+ self.get_logger().info(
+ f"Added collision mesh from data: {object_id}"
+ )
+ else:
+ self.get_logger().error(
+ f"Unsupported collision object type: {request.type}"
+ )
+ response.success = False
+ return response
+
+ response.success = True
+ return response
+
+ except Exception as e:
+ self.get_logger().error(f"Failed to add collision object: {str(e)}")
+ response.success = False
+ return response
+
+ def remove_collision_object_callback(self, request, response):
+ """Handle requests to remove collision objects from the planning scene"""
+ try:
+ # Generate a unique ID for the collision object
+ object_id = f"{request.id}"
+
+ # Remove the collision object
+ self.planner.remove_collision_object(object_id)
+ self.get_logger().info(f"Removed collision object: {object_id}")
+
+ response.success = True
+ return response
+
+ except Exception as e:
+ self.get_logger().error(f"Failed to remove collision object: {str(e)}")
+ response.success = False
+ return response
+
+ def toggle_servo_callback(self, request, response):
+ """Handle requests to toggle the servo"""
+ try:
+ if request.state:
+ self.servo.enable_servo()
+ self.get_logger().info("Enabled servo")
+ else:
+ self.servo.disable_servo()
+ self.get_logger().info("Disabled servo")
+ response.success = True
+ return response
+ except Exception as e:
+ self.get_logger().error(f"Failed to toggle servo: {str(e)}")
+ response.success = False
+ return response
+
+ def servo_speed_callback(self, msg):
+ """Handle changes to the servo speed"""
+ self.get_logger().info(f"Received servo speed: {msg.twist}")
+
+ self.servo.update_servo(
+ linear=(msg.twist.linear.x, msg.twist.linear.y, msg.twist.linear.z),
+ angular=(msg.twist.angular.x, msg.twist.angular.y, msg.twist.angular.z),
+ frame_id=msg.header.frame_id,
+ )
+
def main(args=None):
rclpy.init(args=args)
diff --git a/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/MoveItPlanner.py b/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/MoveItPlanner.py
index 4fe9169c..dda6dbaa 100644
--- a/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/MoveItPlanner.py
+++ b/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/MoveItPlanner.py
@@ -183,45 +183,69 @@ def delete_all_constraints(self) -> None:
self.moveit2.clear_goal_constraints()
self.moveit2.clear_path_constraints()
- def add_collision_box(
- self, id: str, size: List[float], position: List[float], quat_xyzw: List[float]
- ) -> None:
+ def add_collision_box(self, id: str, size: List[float], pose: PoseStamped) -> None:
self.moveit2.add_collision_box(
id=id,
size=tuple(size),
- position=tuple(position),
- quat_xyzw=tuple(quat_xyzw),
+ position=[pose.pose.position.x, pose.pose.position.y, pose.pose.position.z],
+ quat_xyzw=[
+ pose.pose.orientation.x,
+ pose.pose.orientation.y,
+ pose.pose.orientation.z,
+ pose.pose.orientation.w,
+ ],
+ frame_id=pose.header.frame_id,
)
def add_collision_cylinder(
- self,
- id: str,
- height: float,
- radius: float,
- position: List[float],
- quat_xyzw: List[float],
+ self, id: str, height: float, radius: float, pose: PoseStamped
) -> None:
self.moveit2.add_collision_cylinder(
id=id,
height=height,
radius=radius,
- position=tuple(position),
- quat_xyzw=tuple(quat_xyzw),
+ position=[pose.pose.position.x, pose.pose.position.y, pose.pose.position.z],
+ quat_xyzw=[
+ pose.pose.orientation.x,
+ pose.pose.orientation.y,
+ pose.pose.orientation.z,
+ pose.pose.orientation.w,
+ ],
+ frame_id=pose.header.frame_id,
+ )
+
+ def add_collision_sphere(self, id: str, radius: float, pose: PoseStamped) -> None:
+ self.moveit2.add_collision_sphere(
+ id=id,
+ radius=radius,
+ position=[pose.pose.position.x, pose.pose.position.y, pose.pose.position.z],
+ quat_xyzw=[
+ pose.pose.orientation.x,
+ pose.pose.orientation.y,
+ pose.pose.orientation.z,
+ pose.pose.orientation.w,
+ ],
+ frame_id=pose.header.frame_id,
)
def add_collision_mesh(
self,
id: str,
filepath: str,
- position: List[float],
- quat_xyzw: List[float],
+ pose: PoseStamped,
scale: float = 1.0,
) -> None:
self.moveit2.add_collision_mesh(
id=id,
filepath=filepath,
- position=tuple(position),
- quat_xyzw=tuple(quat_xyzw),
+ position=[pose.pose.position.x, pose.pose.position.y, pose.pose.position.z],
+ quat_xyzw=[
+ pose.pose.orientation.x,
+ pose.pose.orientation.y,
+ pose.pose.orientation.z,
+ pose.pose.orientation.w,
+ ],
+ frame_id=pose.header.frame_id,
scale=scale,
)
diff --git a/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/MoveItServo.py b/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/MoveItServo.py
new file mode 100644
index 00000000..a82b3479
--- /dev/null
+++ b/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/MoveItServo.py
@@ -0,0 +1,41 @@
+from rclpy.callback_groups import ReentrantCallbackGroup
+from rclpy.node import Node
+
+from pymoveit2 import MoveIt2Servo
+from typing import Optional, Tuple
+from frida_motion_planning.utils.Servo import Servo
+
+
+class MoveItServo(Servo):
+ def __init__(
+ self,
+ node: Node,
+ callback_group: ReentrantCallbackGroup,
+ max_velocity: float = 0.5,
+ max_acceleration: float = 0.5,
+ ):
+ self.node = node
+ self.servo = MoveIt2Servo(
+ node,
+ frame_id="world",
+ linear_speed=max_velocity,
+ angular_speed=max_velocity,
+ enable_at_init=False,
+ callback_group=callback_group,
+ )
+
+ def enable_servo(self) -> None:
+ self.servo.enable()
+
+ def disable_servo(self) -> None:
+ self.servo.disable()
+
+    def update_servo(
+        self,
+        linear: Tuple[float, float, float] = (0.0, 0.0, 0.0),
+        angular: Tuple[float, float, float] = (0.0, 0.0, 0.0),
+        frame_id: Optional[str] = None,
+    ):
+        # MoveIt2Servo commands are expressed in the frame fixed at construction
+        # ("world" above); frame_id is accepted for interface parity and ignored here.
+        self.servo.servo(linear=linear, angular=angular, enable_if_disabled=False)
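
A minimal sketch of how MoveItServo could be wired into a node, assuming a MoveIt Servo server is already running for the arm (the example node and timer below are illustrative, not part of the patch):

import rclpy
from rclpy.node import Node
from rclpy.callback_groups import ReentrantCallbackGroup
from frida_motion_planning.utils.MoveItServo import MoveItServo


class ServoJogExample(Node):
    def __init__(self):
        super().__init__("servo_jog_example")
        self.servo = MoveItServo(self, ReentrantCallbackGroup(), max_velocity=0.3)
        self.servo.enable_servo()
        # Jog slowly along +x in the frame configured at construction ("world").
        self.timer = self.create_timer(
            0.1, lambda: self.servo.update_servo(linear=(0.05, 0.0, 0.0))
        )


def main():
    rclpy.init()
    rclpy.spin(ServoJogExample())
    rclpy.shutdown()


if __name__ == "__main__":
    main()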
diff --git a/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/Servo.py b/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/Servo.py
new file mode 100644
index 00000000..930401e9
--- /dev/null
+++ b/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/Servo.py
@@ -0,0 +1,37 @@
+from abc import ABC, abstractmethod
+from typing import Optional, Tuple
+from rclpy.node import Node
+
+
+class Servo(ABC):
+ """Abstract base class for motion planners doing servoing in Cartesian space"""
+
+ @abstractmethod
+ def __init__(
+ self,
+ node: Node,
+ max_velocity: float = 0.5,
+ max_acceleration: float = 0.5,
+ ):
+ """Initialize the planner"""
+ pass
+
+ @abstractmethod
+ def enable_servo(self) -> None:
+ """Enable servoing"""
+ pass
+
+ @abstractmethod
+ def disable_servo(self) -> None:
+ """Disable servoing"""
+ pass
+
+    @abstractmethod
+    def update_servo(
+        self,
+        linear: Tuple[float, float, float] = (0.0, 0.0, 0.0),
+        angular: Tuple[float, float, float] = (0.0, 0.0, 0.0),
+        frame_id: Optional[str] = None,
+    ):
+        """Send a Cartesian velocity command, optionally expressed in frame_id"""
+        pass
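
The abstract base keeps callers decoupled from pymoveit2; an alternative backend only has to implement these methods. As an illustration (not part of the patch), a dry-run implementation that logs commands instead of moving the arm could look like:

from typing import Optional, Tuple
from rclpy.node import Node
from frida_motion_planning.utils.Servo import Servo


class LoggingServo(Servo):
    """Dry-run servo backend: logs commands instead of moving the arm."""

    def __init__(self, node: Node, max_velocity: float = 0.5, max_acceleration: float = 0.5):
        self.node = node
        self.enabled = False

    def enable_servo(self) -> None:
        self.enabled = True

    def disable_servo(self) -> None:
        self.enabled = False

    def update_servo(
        self,
        linear: Tuple[float, float, float] = (0.0, 0.0, 0.0),
        angular: Tuple[float, float, float] = (0.0, 0.0, 0.0),
        frame_id: Optional[str] = None,
    ):
        if self.enabled:
            self.node.get_logger().info(f"servo cmd linear={linear} angular={angular}")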
diff --git a/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/tf_utils.py b/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/tf_utils.py
new file mode 100644
index 00000000..3d472968
--- /dev/null
+++ b/manipulation/packages/frida_motion_planning/frida_motion_planning/utils/tf_utils.py
@@ -0,0 +1,33 @@
+import math
+
+
+def quat_to_rpy(quat):
+ """
+ Convert a quaternion to roll, pitch, and yaw (in radians).
+
+ Args:
+ quat (iterable): A quaternion represented as [x, y, z, w].
+
+ Returns:
+ tuple: (roll, pitch, yaw) in radians.
+ """
+ x, y, z, w = quat
+
+ # Compute roll (x-axis rotation)
+ sinr_cosp = 2.0 * (w * x + y * z)
+ cosr_cosp = 1.0 - 2.0 * (x * x + y * y)
+ roll = math.atan2(sinr_cosp, cosr_cosp)
+
+ # Compute pitch (y-axis rotation)
+ sinp = 2.0 * (w * y - z * x)
+ if abs(sinp) >= 1:
+ pitch = math.copysign(math.pi / 2, sinp)
+ else:
+ pitch = math.asin(sinp)
+
+ # Compute yaw (z-axis rotation)
+ siny_cosp = 2.0 * (w * z + x * y)
+ cosy_cosp = 1.0 - 2.0 * (y * y + z * z)
+ yaw = math.atan2(siny_cosp, cosy_cosp)
+
+ return roll, pitch, yaw
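
A quick sanity check for the conversion: a pure 90-degree yaw, quat = [0, 0, sin(pi/4), cos(pi/4)], should map to roll = pitch = 0 and yaw = pi/2.

import math
from frida_motion_planning.utils.tf_utils import quat_to_rpy

roll, pitch, yaw = quat_to_rpy([0.0, 0.0, math.sin(math.pi / 4), math.cos(math.pi / 4)])
assert abs(roll) < 1e-6 and abs(pitch) < 1e-6 and abs(yaw - math.pi / 2) < 1e-6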
diff --git a/manipulation/packages/frida_motion_planning/launch/demo_ds4.launch.py b/manipulation/packages/frida_motion_planning/launch/demo_ds4.launch.py
new file mode 100644
index 00000000..7bc46503
--- /dev/null
+++ b/manipulation/packages/frida_motion_planning/launch/demo_ds4.launch.py
@@ -0,0 +1,50 @@
+from launch import LaunchDescription
+from launch_ros.actions import Node
+from launch.actions import IncludeLaunchDescription
+from launch.launch_description_sources import PythonLaunchDescriptionSource
+from launch.substitutions import LaunchConfiguration, PathJoinSubstitution
+from launch_ros.substitutions import FindPackageShare
+
+
+def generate_launch_description():
+ # command_interpreter_config = os.path.join(
+ # get_package_share_directory(
+ # "nlp"), "config", "command_interpreter.yaml"
+ # )
+
+ robot_ip = LaunchConfiguration("robot_ip")
+
+ # robot moveit realmove launch
+ # xarm_moveit_config/launch/_robot_moveit_realmove.launch.py
+ robot_moveit_realmove_launch = IncludeLaunchDescription(
+ PythonLaunchDescriptionSource(
+ PathJoinSubstitution(
+ [
+ FindPackageShare("xarm_moveit_config"),
+ "launch",
+ "_robot_moveit_realmove.launch.py",
+ ]
+ )
+ ),
+ launch_arguments={
+ "robot_ip": robot_ip,
+ "dof": "6",
+ "robot_type": "xarm",
+ "hw_ns": "xarm",
+ "no_gui_ctrl": "false",
+ }.items(),
+ )
+
+ motion_planning_launch = Node(
+ package="frida_motion_planning",
+ executable="motion_planning_server.py",
+ name="motion_planning_server",
+ )
+
+ demo_ds4_launch = Node(
+ package="frida_motion_planning", executable="ds4_demo.py", name="ds4_demo"
+ )
+
+ return LaunchDescription(
+ [robot_moveit_realmove_launch, motion_planning_launch, demo_ds4_launch]
+ )
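
Note: the launch file reads the arm's address from the robot_ip launch configuration, so it must be supplied at launch time (for example robot_ip:=192.168.1.100, a placeholder address). The ds4_demo node presumably expects joystick input from the PlayStation-JoyInterface-ROS2 driver added as a submodule below.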
diff --git a/navigation/packages/PlayStation-JoyInterface-ROS2 b/navigation/packages/PlayStation-JoyInterface-ROS2
new file mode 160000
index 00000000..88f92314
--- /dev/null
+++ b/navigation/packages/PlayStation-JoyInterface-ROS2
@@ -0,0 +1 @@
+Subproject commit 88f92314c2550cce963eeacfe9717d1cfca8b814
diff --git a/navigation/packages/dashgo_driver/config/dashgo_parameters.yaml b/navigation/packages/dashgo_driver/config/dashgo_parameters.yaml
deleted file mode 100644
index 4e9f08c0..00000000
--- a/navigation/packages/dashgo_driver/config/dashgo_parameters.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
-DashgoDriver:
- ros__parameters:
- port: /dev/ttyUSB0
- baudrate: 115200
- timeout: 0.1
- rate: 50
- sensorstate_rate: 10
-
- # === Robot Module activation
- use_smotheer: False # To use the smoother nodelet set use_smoother to True
- useSonar: False
- useImu: True
- use_base_controller: True
- base_controller_rate: 10
-
- # For a robot that uses base_footprint, change base_frame to base_footprint
- base_frame: base_footprint
-
-
- # === Robot drivetrain parameters
- wheel_diameter: 0.1280
- wheel_track: 0.341
- encoder_resolution: 1200
- gear_reduction: 1.0
- motors_reversed: False
-
- accel_limit: 1.0
-
- #Activate modes
- pub_encoders: False
- voltage_pub: False
- show_statics: False
-
-
- #Sonar parameters
- sonar_height: 0.115
-
- sonar0_offset_yaw: 0.524
- sonar0_offset_x: 0.18
- sonar0_offset_y: 0.10
-
- sonar1_offset_yaw: 0.0
- sonar1_offset_x: 0.20
- sonar1_offset_y: 0.0
-
- sonar2_offset_yaw: -0.524
- sonar2_offset_x: 0.18
- sonar2_offset_y: -0.10
-
- sonar3_offset_yaw: 3.14
- sonar3_offset_x: -0.20
- sonar3_offset_y: 0.0
\ No newline at end of file
diff --git a/navigation/packages/dashgo_driver/config/dashgo_params.yaml b/navigation/packages/dashgo_driver/config/dashgo_params.yaml
index c0dd95fd..70ebb38a 100644
--- a/navigation/packages/dashgo_driver/config/dashgo_params.yaml
+++ b/navigation/packages/dashgo_driver/config/dashgo_params.yaml
@@ -10,7 +10,7 @@ DashgoDriver:
useImu: True
# For a robot that uses base_footprint, change base_frame to base_footprint
- base_frame: base_footprint
+ base_frame: base_link
# === Robot drivetrain parameters
diff --git a/navigation/packages/dashgo_driver/launch/dashgo_v2.launch.py b/navigation/packages/dashgo_driver/launch/dashgo_driver.launch.py
similarity index 100%
rename from navigation/packages/dashgo_driver/launch/dashgo_v2.launch.py
rename to navigation/packages/dashgo_driver/launch/dashgo_driver.launch.py
diff --git a/navigation/packages/dashgo_driver/launch/dashgo_driver_launch.py b/navigation/packages/dashgo_driver/launch/dashgo_driver_launch.py
deleted file mode 100644
index 9f3f6781..00000000
--- a/navigation/packages/dashgo_driver/launch/dashgo_driver_launch.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-from ament_index_python.packages import get_package_share_directory
-from launch import LaunchDescription
-from launch_ros.actions import Node
-
-def generate_launch_description():
- config = os.path.join(
- get_package_share_directory('dashgo_driver'),
- 'config',
- 'dashgo_parameters.yaml'
- )
- return LaunchDescription([
- Node(
- package='dashgo_driver',
- executable='dashgo_driver2.py',
- name='DashgoDriver',
- output='screen',
- emulate_tty=True,
- parameters=[
- config
- ],
- ),
- ])
\ No newline at end of file
diff --git a/navigation/packages/dashgo_driver/launch/ekf_test.launch.py b/navigation/packages/dashgo_driver/launch/ekf.launch.py
similarity index 96%
rename from navigation/packages/dashgo_driver/launch/ekf_test.launch.py
rename to navigation/packages/dashgo_driver/launch/ekf.launch.py
index f27b31fe..362bdc3f 100644
--- a/navigation/packages/dashgo_driver/launch/ekf_test.launch.py
+++ b/navigation/packages/dashgo_driver/launch/ekf.launch.py
@@ -18,7 +18,7 @@ def generate_launch_description():
'two_d_mode': True,
'publish_tf': True,
'map_frame': 'map',
- 'base_link_frame': 'base_footprint',
+ 'base_link_frame': 'base_link',
'world_frame': 'odom',
'odom0': 'dashgo_odom',
#'odom0': 'odom',
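
With this change the driver's base_frame (dashgo_params.yaml above) and robot_localization's base_link_frame both resolve to base_link, so the wheel odometry and the EKF agree on the same child frame in the map -> odom -> base_link chain instead of base_footprint.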
diff --git a/navigation/packages/dashgo_driver/scripts/dashgo_driverlol.py b/navigation/packages/dashgo_driver/scripts/dashgo_driverlol.py
deleted file mode 100644
index f87c9b4e..00000000
--- a/navigation/packages/dashgo_driver/scripts/dashgo_driverlol.py
+++ /dev/null
@@ -1,1350 +0,0 @@
-#!/usr/bin/env python3
-from serial.serialutil import SerialException
-from serial import Serial
-import sys, traceback
-import os
-from math import pi as PI,sin, cos
-import struct
-import math
-import binascii
-
-import rclpy
-from rclpy.node import Node
-from rclpy.duration import Duration
-from geometry_msgs.msg import Twist,Quaternion,TransformStamped
-from std_msgs.msg import Int16, Int32, UInt16, Float32, String
-from sensor_msgs.msg import Range, Imu, PointCloud2
-from nav_msgs.msg import Odometry
-import tf2_ros
-import time as t
-
-ODOM_POSE_COVARIANCE = [float(1e-3), 0.0, 0.0, 0.0, 0.0, 0.0,
- 0.0, float(1e-3), 0.0, 0.0, 0.0, 0.0,
- 0.0, 0.0, float(1e6), 0.0, 0.0, 0.0,
- 0.0, 0.0, 0.0, float(1e6), 0.0, 0.0,
- 0.0, 0.0, 0.0, 0.0, float(1e6), 0.0,
- 0.0, 0.0, 0.0, 0.0, 0.0, float(1e3)]
-ODOM_TWIST_COVARIANCE = [float(1e-3), 0.0, 0.0, 0.0, 0.0, 0.0,
- 0.0, float(1e-3), 0.0, 0.0, 0.0, 0.0,
- 0.0, 0.0, float(1e6), 0.0, 0.0, 0.0,
- 0.0, 0.0, 0.0, float(1e6), 0.0, 0.0,
- 0.0, 0.0, 0.0, 0.0, float(1e6), 0.0,
- 0.0, 0.0, 0.0, 0.0, 0.0, float(1e3)]
-N_ANALOG_PORTS = 6
-N_DIGITAL_PORTS = 12
-
-
-class Stm32:
-
- def __init__(self,node, port, baudrate, timeout,stm32_timeout):
- # Initialize the values
- self.port = port
- self.stm32_timeout = stm32_timeout
- self.baudrate = baudrate
- self.timeout = timeout
- self.node = node
- self.PID_RATE = 30 # Do not change this! It is a fixed property of the Stm32 PID controller.
- self.PID_INTERVAL = 1000 / 30
- self.WAITING_FF = 0
- self.WAITING_AA = 1
- self.RECEIVE_LEN = 2
- self.RECEIVE_PACKAGE = 3
- self.RECEIVE_CHECK = 4
- self.HEADER0 = 0xff
- self.HEADER1 = 0xaa
- self.SUCCESS = 0
- self.FAIL = -1
- self.receive_state_ = self.WAITING_FF
- self.receive_check_sum_ = 0
- self.payload_command = b''
- self.payload_ack = b''
- self.payload_args = b''
- self.payload_len = 0
- self.byte_count_ = 0
- self.receive_message_length_ = 0
- self.encoder_count = 0
- self.writeTimeout = timeout
- self.interCharTimeout = timeout / 30.
- # An array to cache analog sensor readings
- self.analog_sensor_cache = [None] * N_ANALOG_PORTS
- # An array to cache digital sensor readings
- self.digital_sensor_cache = [None] * N_DIGITAL_PORTS
-
- def connect(self):
- try:
- self.node.get_logger().info(f"Connecting to Stm32 on port {self.port} ...")
- # self.port = Serial(port="/dev/port1", baudrate=115200, timeout=0.1, writeTimeout=0.1)
- self.port = Serial(port=self.port, baudrate=self.baudrate, timeout=self.timeout, writeTimeout=self.writeTimeout)
- # The next line is necessary to give the firmware time to wake up.
- counter = 0
- t.sleep(1)
- state_, val = self.get_baud()
- while val!=self.baudrate:
- t.sleep(1)
- state_, val = self.get_baud()
- counter+=1
- if counter >= self.stm32_timeout:
- raise SerialException
- self.node.get_logger().info("Connected at" + str(self.baudrate))
- self.node.get_logger().info("Stm32 is ready.")
-
- except SerialException:
- self.node.get_logger().error("Serial Exception:")
- self.node.get_logger().error(str(sys.exc_info()))
- self.node.get_logger().error("Traceback follows:")
- traceback.print_exc(file=sys.stdout)
- self.node.get_logger().error("Cannot connect to Stm32!")
- os._exit(1)
- def receiveFiniteStates(self, rx_data):
- if self.receive_state_ == self.WAITING_FF:
- #print str(binascii.b2a_hex(rx_data))
- if rx_data == b'\xff':
- self.receive_state_ = self.WAITING_AA
- self.receive_check_sum_ =0
- self.receive_message_length_ = 0
- self.byte_count_=0
- self.payload_ack = b''
- self.payload_args = b''
- self.payload_len = 0
-
-
- elif self.receive_state_ == self.WAITING_AA :
- if rx_data == b'\xaa':
- self.receive_state_ = self.RECEIVE_LEN
- self.receive_check_sum_ = 0
- else:
- self.receive_state_ = self.WAITING_FF
-
- elif self.receive_state_ == self.RECEIVE_LEN:
- self.receive_message_length_, = struct.unpack("B",rx_data)
- self.receive_state_ = self.RECEIVE_PACKAGE
- self.receive_check_sum_ = self.receive_message_length_
- elif self.receive_state_ == self.RECEIVE_PACKAGE:
- if self.byte_count_==0:
- self.payload_ack = rx_data
- else:
- self.payload_args += rx_data
- uc_tmp_, = struct.unpack("B",rx_data)
- self.receive_check_sum_ = self.receive_check_sum_ + uc_tmp_
- self.byte_count_ +=1
- #print "byte:"+str(byte_count_) +","+ "rece_len:"+str(receive_message_length_)
- if self.byte_count_ >= self.receive_message_length_:
- self.receive_state_ = self.RECEIVE_CHECK
-
- elif self.receive_state_ == self.RECEIVE_CHECK:
- #print "checksun:" + str(rx_data) + " " + str(self.receive_check_sum_%255)
- #uc_tmp_, = struct.unpack("B",rx_data)
- #print "checksum:" + str(uc_tmp_) +" " + str((self.receive_check_sum_)%255)
- #if uc_tmp_ == (self.receive_check_sum_)%255:
- if 1:
- self.receive_state_ = self.WAITING_FF
- #print str(binascii.b2a_hex(value))
- #left, right, = struct.unpack('hh', value)
- #print "left:"+str(left)+", right:"+str(right)
- return 1
- else:
- self.receive_state_ = self.WAITING_FF
- else:
- self.receive_state_ = self.WAITING_FF
- return 0
-
- def recv(self, timeout=0.5):
- timeout = min(timeout, self.timeout)
- ''' This command should not be used on its own: it is called by the execute commands
- below in a thread safe manner. Note: we use read() instead of readline() since
- readline() tends to return garbage characters from the Stm32
- '''
- c = ''
- value = ''
- attempts = 0
- c = self.port.read(1)
- #print str(binascii.b2a_hex(c))
- while self.receiveFiniteStates(c) != 1:
- c = self.port.read(1)
- #print str(binascii.b2a_hex(c))
- attempts += 1
- if attempts * self.interCharTimeout > timeout:
- return 0
- return 1
-
- def get_baud(self):
- ''' Get the current baud rate on the serial port.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x00) + struct.pack("B", 0x01)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- val, = struct.unpack('I', self.payload_args)
- return self.SUCCESS, val
- else:
- # print("ACK", self.payload_ack, self.payload_ack == b'\x00', self.execute(cmd_str)==1)
- return self.FAIL, 0
-
- def reset_IMU(self):
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x41) + struct.pack("B", 0x42)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- return self.SUCCESS
- else:
- return self.FAIL
- def start_automatic_recharge(self):
- ''' start for automatic recharge.
- '''
- cmd_str=struct.pack("6B", self.HEADER0, self.HEADER1, 0x03, 0x10, 0x01, 0x00) + struct.pack("B", 0x14)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- print("start")
- return self.SUCCESS
- else:
- return self.FAIL
- def get_hardware_version(self):
- ''' Get the current version of the hardware.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x13) + struct.pack("B", 0x14)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- val0,val1,val2,val3 = struct.unpack('BBBB', self.payload_args)
- return self.SUCCESS, val0, val1,val2,val3
- else:
- return self.FAIL, -1, -1
- def get_firmware_version(self):
- ''' Get the current version of the firmware.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x01) + struct.pack("B", 0x02)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- val0,val1,val2,val3 = struct.unpack('BBBB', self.payload_args)
- return self.SUCCESS, val0, val1,val2,val3
- else:
- return self.FAIL, -1, -1
- def get_pid(self, cmd):
- ''' Get the current value of the imu.
- '''
- check_number_list = [0x01, cmd]
- checknum = self.get_check_sum(check_number_list)
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, cmd) + struct.pack("B", checknum)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- val_l,val_r = struct.unpack('HH', self.payload_args)
- lreal=float(val_l)/100.0
- rreal=float(val_r)/100.0
- return self.SUCCESS, lreal, rreal
- else:
- return self.FAIL, -1, -1
- def set_pid(self, cmd, left, right):
- ''' set pid.
- '''
- lpid = int(left*100)
- lpid_l = lpid & 0xff
- lpid_h = (lpid >> 8) & 0xff
- rpid = int(right*100)
- rpid_l = rpid & 0xff
- rpid_h = (rpid >> 8) & 0xff
- check_number_list = [0x05, cmd, lpid_h, lpid_l, rpid_h, rpid_l]
- checknum = self.get_check_sum(check_number_list)
- cmd_str=struct.pack("8B", self.HEADER0, self.HEADER1, 0x05, cmd, lpid_h, lpid_l, rpid_h, rpid_l) + struct.pack("B", checknum)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- return self.SUCCESS
- else:
- return self.FAIL
- def reset_encoders(self):
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x03) + struct.pack("B", 0x04)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- return self.SUCCESS
- else:
- return self.FAIL
-
- def get_check_sum(self,list):
- list_len = len(list)
- cs = 0
- for i in range(list_len):
- #print i, list[i]
- cs += list[i]
- cs=cs%255
- return cs
-
- def reset_system(self):
- ''' reset system.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x40) + struct.pack("B", 0x41)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- return self.SUCCESS
- else:
- return self.FAIL
-
- def get_encoder_counts(self):
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x02) + struct.pack("B", 0x03)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- left_enc, right_enc, = struct.unpack('HH', self.payload_args)
- return self.SUCCESS, left_enc, right_enc
- else:
- return self.FAIL, 0, 0
- def get_infrareds(self):
- ''' Get the current distance on the infrareds.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x0F) + struct.pack("B", 0x10)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- val0, val1, val2, val3, val4, val5 = struct.unpack('HHHHHH', self.payload_args)
- return self.SUCCESS, val0, val1, val2, val3, val4, val5
- else:
- return self.FAIL, -1, -1, -1, -1, -1, -1
-
- def get_recharge_way(self):
- ''' Get the way of the recharge.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x17) + struct.pack("B", 0x18)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- #print("payload:"+str(binascii.b2a_hex(self.payload_args)))
- way, = struct.unpack('I', self.payload_args)
- return self.SUCCESS, way
- else:
- return self.FAIL, -1
- def get_automatic_recharge_status(self):
- ''' Get the status of automatic recharge.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x11) + struct.pack("B", 0x12)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- val, = struct.unpack('I', self.payload_args)
- return self.SUCCESS, val
- else:
- return self.FAIL, -1
- def get_embtn_recharge(self):
- ''' Get the status of the emergency button and recharge.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x15) + struct.pack("B", 0x16)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- em,rech = struct.unpack('HH', self.payload_args)
- return self.SUCCESS, em, rech
- else:
- return self.FAIL, -1, -1
- def get_sonar_range(self):
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x0D) + struct.pack("B", 0x0E)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- #left_enc,right_enc, = struct.unpack('hh', self.payload_args)
- sonar0, sonar1, sonar2, sonar3, sonar4, sonar5, = struct.unpack('6H', self.payload_args)
- return self.SUCCESS, sonar0, sonar1, sonar2, sonar3, sonar4, sonar5
- else:
- return self.FAIL, 0, 0, 0, 0, 0, 0
- def get_imu_val(self):
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x05) + struct.pack("B", 0x06)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- #left_enc,right_enc, = struct.unpack('hh', self.payload_args)
- yaw, yaw_vel, x_acc, y_acc, z_acc, = struct.unpack('5H', self.payload_args)
- return self.SUCCESS, yaw, yaw_vel, x_acc, y_acc, z_acc
- else:
- return self.FAIL, 0, 0, 0, 0, 0
- def drive(self, left, right):
- data1 = struct.pack("h", int(left))
- d1, d2 = struct.unpack("BB", data1)
-
- data2 = struct.pack("h", int(right))
- c1, c2 = struct.unpack("BB", data2)
-
- self.check_list = [0x05,0x04, d1, d2, c1, c2]
- self.check_num = self.get_check_sum(self.check_list)
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x05, 0x04) + struct.pack("hh", int(left), int(right)) + struct.pack("B", self.check_num)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- return self.SUCCESS
- else:
- return self.FAIL
- def get_voltage(self):
- ''' Get the current voltage the battery.
- '''
- cmd_str=struct.pack("4B", self.HEADER0, self.HEADER1, 0x01, 0x12) + struct.pack("B", 0x13)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- vol1, vol2, vol3, vol4, vol5, vol6 = struct.unpack('6H', self.payload_args)
- return self.SUCCESS, vol1, vol2, vol3, vol4, vol5, vol6
- else:
- return self.FAIL, -1, -1, -1, -1, -1, -1
- def stop_automatic_recharge(self):
- ''' stop for automatic recharge.
- '''
- cmd_str=struct.pack("6B", self.HEADER0, self.HEADER1, 0x03, 0x10, 0x00, 0x00) + struct.pack("B", 0x13)
- if (self.execute(cmd_str))==1 and self.payload_ack == b'\x00':
- print("stop")
- return self.SUCCESS
- else:
- return self.FAIL
- def execute(self, cmd):
- try:
- self.port.flushInput()
- except:
- pass
-
- ntries = 1
- attempts = 0
-
- try:
- self.port.write(cmd)
- res = self.recv(self.timeout)
- while attempts < ntries and res !=1 :
- try:
- self.port.flushInput()
- self.port.write(cmd)
- res = self.recv(self.timeout)
- #print "response : " + str(binascii.b2a_hex(res))
- except:
- self.node.get_logger().error("Exception executing command: " + str(binascii.b2a_hex(cmd)))
- attempts += 1
- except:
- self.node.get_logger().error("Exception executing command: " + str(binascii.b2a_hex(cmd)))
- return 0
-
- return 1
-
-class BaseController:
- def __init__(
- self,node,rate, Stm32, base_frame, wheel_diameter,
- wheel_track, encoder_resolution, gear_reduction, accel_limit,
- motors_reversed, start_rotation_limit_w,use_smotheer,useImu,useSonar,
- encoder_min, encoder_max,sonar_height, sonar0_offset_yaw,sonar0_offset_x,sonar0_offset_y,
- sonar1_offset_yaw,sonar1_offset_x,sonar1_offset_y, sonar2_offset_yaw,sonar2_offset_x,
- sonar2_offset_y,sonar3_offset_yaw,sonar3_offset_x,sonar3_offset_y,sonar4_offset_yaw,
- sonar4_offset_x,sonar4_offset_y,imu_frame_id,imu_offset,PubEncoders,voltage_pub,
- show_statics,base_controller_timeout,encoder_high_wrap,encoder_low_wrap
- ):
- self.node = node
- self.timeout = base_controller_timeout
- self.encoder_high_wrap = encoder_high_wrap
- self.encoder_low_wrap = encoder_low_wrap
- self.voltage_pub = voltage_pub
- self.show_statics = show_statics
- self.useImu = useImu
- self.imu_frame_id = imu_frame_id
- self.sonar_height = sonar_height
- self.encoder_min=encoder_min
- self.encoder_max=encoder_max
- self.imu_offset = imu_offset
- self.PubEncoders = PubEncoders
- self.Stm32 = Stm32
- self.rate = rate
- self.useSonar = useSonar
- self.use_smotheer = use_smotheer
- self.base_frame = base_frame
- self.wheel_diameter = wheel_diameter
- self.wheel_track = wheel_track
- self.encoder_resolution = encoder_resolution
- self.gear_reduction = gear_reduction
- self.accel_limit = accel_limit
- self.motors_reversed = motors_reversed
- self.start_rotation_limit_w = start_rotation_limit_w
- self.sonar0_offset_yaw = sonar0_offset_yaw
- self.sonar0_offset_x = sonar0_offset_x
- self.sonar0_offset_y = sonar0_offset_y
- self.sonar1_offset_yaw = sonar1_offset_yaw
- self.sonar1_offset_x = sonar1_offset_x
- self.sonar1_offset_y = sonar1_offset_y
- self.sonar2_offset_yaw = sonar2_offset_yaw
- self.sonar2_offset_x = sonar2_offset_x
- self.sonar2_offset_y = sonar2_offset_y
- self.sonar3_offset_yaw = sonar3_offset_yaw
- self.sonar3_offset_x = sonar3_offset_x
- self.sonar3_offset_y = sonar3_offset_y
- self.sonar4_offset_yaw = sonar4_offset_yaw
- self.sonar4_offset_x = sonar4_offset_x
- self.sonar4_offset_y = sonar4_offset_y
- self.stopped = False
- self.bad_encoder_count = 0
- self.ticks_per_meter = self.encoder_resolution * self.gear_reduction / (self.wheel_diameter * PI)
- self.max_accel = self.accel_limit * self.ticks_per_meter / self.rate
- self.l_wheel_mult = 0
- self.r_wheel_mult = 0
- self.SUCCESS = 0
- self.FAIL = -1
-
- now = self.node.get_clock().now()
- self.then = now
- self.t_delta = Duration(seconds=1.0 / self.rate)
- self.t_next = now + self.t_delta
-
- # Internal data
- self.enc_left = None # encoder readings
- self.enc_right = None
- self.x = 0 # position in xy plane
- self.y = 0
- self.th = 0 # rotation in radians
- self.v_left = 0
- self.v_right = 0
- self.v_des_left = 0 # cmd_vel setpoint
- self.v_des_right = 0
- self.last_cmd_vel = now
- self.lwheel_ele = 0
- self.rwheel_ele = 0
- self.recharge_way=0
- self.sonar_r0 =0.0
- self.sonar_r1 =0.0
- self.sonar_r2 =0.0
- self.sonar_r3 =0.0
- self.sonar_r4 =0.0
- self.safe_range_0 = 10
- self.safe_range_1 = 30
- self.sonar_maxval = 3.5
- self.sonar_cloud = [[100.0,0.105,0.1],[100.0,-0.105,0.1],[0.2,100.0,0.1],[0.2,-100.0,0.1],[-100.0,0.0,0.1]]
- self.voltage_val = 0
- self.voltage_str = ""
- self.emergencybt_val = 0
- self.is_recharge = False
- self.recharge_status = 0
- self.isPassed = True
-
- if (self.use_smotheer == True):
- self.robot_cmd_vel_pub = self.node.create_publisher(Twist, 'robot_cmd_vel', 5)
- self.node.create_subscription(Int16,'is_passed',self.isPassedCallback,10) # Probably not needed
- self.node.create_subscription(Twist,'smoother_cmd_vel',self.cmdVelCallback,10)
- else:
- self.node.create_subscription(Twist,'cmd_vel',self.cmdVelCallback,10)
-
- #RESET VALUES OF STM32
- self.Stm32.reset_encoders()
- self.Stm32.reset_IMU()
-
- #SONAR DECLARATION
- if (self.useSonar == True):
- self.sonar0_pub = self.node.create_publisher(Range, 'sonar0', 5)
- self.sonar1_pub = self.node.create_publisher(Range, 'sonar1', 5)
- self.sonar2_pub = self.node.create_publisher(Range, 'sonar2', 5)
- self.sonar3_pub = self.node.create_publisher(Range, 'sonar3', 5)
- self.sonar4_pub = self.node.create_publisher(Range, 'sonar4', 5)
-
- self.sonar_pub_cloud = self.node.create_publisher(PointCloud2, 'sonar_cloudpoint', 5)
-
- self.sonar_cloud[0][0] = self.sonar0_offset_x + self.sonar_maxval * math.cos(self.sonar0_offset_yaw)
- self.sonar_cloud[0][1] = self.sonar0_offset_y + self.sonar_maxval * math.sin(self.sonar0_offset_yaw)
- self.sonar_cloud[0][2] = self.sonar_height
-
- self.sonar_cloud[1][0] = self.sonar1_offset_x + self.sonar_maxval * math.cos(self.sonar1_offset_yaw)
- self.sonar_cloud[1][1] = self.sonar1_offset_y + self.sonar_maxval * math.sin(self.sonar1_offset_yaw)
- self.sonar_cloud[1][2] = self.sonar_height
-
- self.sonar_cloud[2][0] = self.sonar2_offset_x + self.sonar_maxval * math.cos(self.sonar2_offset_yaw)
- self.sonar_cloud[2][1] = self.sonar2_offset_y + self.sonar_maxval * math.sin(self.sonar2_offset_yaw)
- self.sonar_cloud[2][2] = self.sonar_height
-
- self.sonar_cloud[3][0] = self.sonar3_offset_x + self.sonar_maxval * math.cos(self.sonar3_offset_yaw)
- self.sonar_cloud[3][1] = self.sonar3_offset_y + self.sonar_maxval * math.sin(self.sonar3_offset_yaw)
- self.sonar_cloud[3][2] = self.sonar_height
-
- self.sonar_cloud[4][0] = self.sonar4_offset_x + self.sonar_maxval * math.cos(self.sonar4_offset_yaw)
- self.sonar_cloud[4][1] = self.sonar4_offset_y + self.sonar_maxval * math.sin(self.sonar4_offset_yaw)
- self.sonar_cloud[4][2] = self.sonar_height
-
- #IMU DECLARATION
- self.imuPub = self.node.create_publisher(Imu, 'imu', 5)
- self.imuAnglePub = self.node.create_publisher(Float32, 'imu_angle', 5)
- #Set up odometry broadcaster
- self.odomPub = self.node.create_publisher(Odometry, 'odom', 5)
- self.odomBroadcaster = tf2_ros.TransformBroadcaster(self.node)
-
- self.node.get_logger().info("Started base controller for a base of " + str(self.wheel_track) + "m wide with " + str(self.encoder_resolution) + " ticks per rev")
- self.node.get_logger().info("Publishing odometry data at: " + str(self.rate) + " Hz using " + str(self.base_frame) + " as base frame")
-
- if self.PubEncoders == True:
- self.lEncoderPub = self.node.create_publisher(UInt16, 'Lencoder', 5)
- self.rEncoderPub = self.node.create_publisher(UInt16, 'Rencoder', 5)
- self.lVelPub=self.node.create_publisher(Int16, 'Lvel', 5)
- self.rVelPub=self.node.create_publisher(Int16, 'Rvel', 5)
-
-
-
- if self.voltage_pub == True:
- self.voltagePub = self.node.create_publisher(Int32, 'voltage_value', 5)
- self.voltage_percentage_pub = self.node.create_publisher(Int32, 'voltage_percentage', 5)
- self.voltage_str_pub = self.node.create_publisher(String, 'voltage_str', 5)
-
- if self.show_statics == True:
- self.emergencybt_pub = self.node.create_publisher(Int16, 'emergencybt_status', 5)
- self.recharge_ir_pub = self.node.create_publisher(Int16, 'recharge_ir_status', 5)
- self.node.create_subscription(Int16,'recharge_handle',self.handleRechargeCallback,10)
- self.recharge_pub = self.node.create_publisher(Int16,'recharge_status',5)
- self.node.create_subscription(Int16,'ware_version_req',self.reqVersionCallback,10)
- self.version_pub = self.node.create_publisher(String,'ware_version',5)
- self.pid_p_pub = self.node.create_publisher(String,'pid_p',5)
- self.pid_i_pub = self.node.create_publisher(String,'pid_i',5)
- self.pid_d_pub = self.node.create_publisher(String,'pid_d',5)
- self.node.create_subscription(String,'pid_req', self.reqPidCallback,10)
- self.node.create_subscription( String, 'pid_set', self.reqSetPidCallback,10)
- self.recharge_way_pub = self.node.create_publisher(Int32,'recharge_way',5)
- self.lwheel_ele_pub = self.node.create_publisher(Int32,'lwheel_ele', 5)
- self.rwheel_ele_pub = self.node.create_publisher(Int32,'rwheel_ele', 5)
- self.ir0_pub = self.node.create_publisher(Int32,'ir0',5)
- self.ir1_pub = self.node.create_publisher(Int32,'ir1',5)
- self.ir2_pub = self.node.create_publisher(Int32,'ir2',5)
- self.ir3_pub = self.node.create_publisher(Int32,'ir3',5)
- self.ir4_pub = self.node.create_publisher(Int32,'ir4',5)
- self.ir5_pub = self.node.create_publisher(Int32,'ir5',5)
-
-
- self.node.create_subscription(Int16,'imu_reset',self.resetImuCallback,10)
- self.node.create_subscription(Int16,'encoder_reset',self.resetEncoderCallback,10)
- self.node.create_subscription(Int16,'system_reset',self.resetSystemCallback,10)
-
-
- self.stm32_version=0
- _,stm32_hardware1,stm32_hardware0,stm32_software1,stm32_software0=self.Stm32.get_hardware_version()
- self.node.get_logger().info("*************************************************")
- self.node.get_logger().info("stm32 hardware_version is "+str(stm32_hardware0)+str(".")+str(stm32_hardware1))
- self.node.get_logger().info("stm32 software_version is "+str(stm32_software0)+str(".")+str(stm32_software1))
- self.node.get_logger().info("*************************************************")
-
-
-
- def resetSystemCallback(self, req):
- if req.data==1:
- try:
- res = self.Stm32.reset_system()
- if res==self.FAIL:
- self.node.get_logger().error("reset system failed ")
- except:
- self.node.get_logger().error("request to reset system exception ")
-
- def resetEncoderCallback(self, req):
- if req.data==1:
- try:
- res = self.Stm32.reset_encoders()
- if res==self.FAIL:
- self.node.get_logger().error("reset encoder failed ")
- except:
- self.node.get_logger().error("request to reset encoder exception ")
-
- def reqSetPidCallback(self, req):
- if req.data!='':
- set_list=req.data.split(",")
- if set_list[0]=='P':
- try:
- res=self.Stm32.set_pid(0x06, float(set_list[1]), float(set_list[2]))
- if res==self.FAIL:
- self.node.get_logger().error("set the P of PID failed ")
- except:
- self.node.get_logger().error("set the P of PID exception ")
- if set_list[0]=='I':
- try:
- res=self.Stm32.set_pid(0x08, float(set_list[1]), float(set_list[2]))
- if res==self.FAIL:
- self.node.get_logger().error("set the I of PID failed ")
- except:
- self.node.get_logger().error("set the I of PID exception ")
- if set_list[0]=='D':
- try:
- res=self.Stm32.set_pid(0x0A, float(set_list[1]), float(set_list[2]))
- if res==self.FAIL:
- self.node.get_logger().error("set the D of PID failed ")
- except:
- self.node.get_logger().error("set the D of PID exception ")
-
- def reqPidCallback(self, req):
- if req.data=='P':
- try:
- res,pl,pr = self.Stm32.get_pid(0x07)
- self.pid_p_pub.publish(str(pl) + "," + str(pr))
- if res==self.FAIL:
- self.node.get_logger().error("request the P of PID failed ")
- except:
- self.pid_p_pub.publish("")
- self.node.get_logger().error("request the P of PID exception ")
- if req.data=='I':
- try:
- res,il,ir = self.Stm32.get_pid(0x09)
- self.pid_i_pub.publish(str(il) + "," + str(ir))
- if res==self.FAIL:
- self.node.get_logger().error("request the I of PID failed ")
- except:
- self.pid_i_pub.publish("")
- self.node.get_logger().error("request the I of PID exception ")
- if req.data=='D':
- try:
- res,dl,dr = self.Stm32.get_pid(0x0B)
- self.pid_d_pub.publish(str(dl) + "," + str(dr))
- if res==self.FAIL:
- self.node.get_logger().error("request the D of PID failed ")
- except:
- self.pid_d_pub.publish("")
- self.node.get_logger().error("request the D of PID exception ")
-
- def reqVersionCallback(self, req):
- if req.data==1:
- try:
- res,ver0,ver1,ver2,ver3 = self.Stm32.get_hardware_version()
- self.version_pub.publish(str(ver0)+"."+str(ver1)+"-"+str(ver2)+"."+str(ver3))
- if res==self.FAIL:
- self.node.get_logger().error("request the version of hardware failed ")
- except:
- self.version_pub.publish("")
- self.node.get_logger().error("request the version of hardware exception ")
- if req.data==2:
- try:
- res,ver0,ver1,ver2,ver3 = self.Stm32.get_firmware_version()
- self.version_pub.publish(str(ver0)+"."+str(ver1)+"-"+str(ver2)+"."+str(ver3))
- if res==self.FAIL:
- self.node.get_logger().error("request the version of firmware failed ")
- except:
- self.version_pub.publish("")
- self.node.get_logger().error("request the version of firmware exception ")
-
- def resetImuCallback(self, req):
- if req.data==1:
- try:
- res = self.Stm32.reset_imu()
- if res==self.FAIL:
- self.node.get_logger().error("reset imu failed ")
- except:
- self.node.get_logger().error("request to reset imu exception ")
-
- def handleRechargeCallback(self, req):
- if req.data==1:
- try:
- res = self.Stm32.start_automatic_recharge()
- self.is_recharge = True
- except:
- self.node.get_logger().error("start automatic recharge exception ")
- else:
- try:
- res = self.Stm32.stop_automatic_recharge()
- self.is_recharge = False
- except:
- self.node.get_logger().error("stop automatic recharge exception ")
-
- # Probably not needed
- def isPassedCallback(self, msg):
- if(msg.data>2):
- self.isPassed = False
- else:
- self.isPassed = True
-
- #Only for smoother
- def cmdVelCallback(self, req):
- # Handle velocity-based movement requests
- self.last_cmd_vel = self.node.get_clock().now()
-
- robot_cmd_vel = Twist()
- x = req.linear.x # m/s
- th = req.angular.z # rad/s
-
- if self.emergencybt_val==1:
- robot_cmd_vel.linear.x = 0.0
- robot_cmd_vel.linear.y = 0.0
- robot_cmd_vel.angular.z = 0.0
- else:
- robot_cmd_vel.linear.x = x
- robot_cmd_vel.linear.y = 0.0
- robot_cmd_vel.angular.z = th
-
- if (self.use_smotheer == True):
- self.robot_cmd_vel_pub.publish(robot_cmd_vel)
-
- if not self.isPassed and x>0 :
- x = 0
-
- if (self.useSonar == True) :
- #sonar0
- if((self.sonar_r0<=self.safe_range_0 and self.sonar_r0>=2) and (x<0)):
- x= 0.0
- self.node.get_logger().warn("sonar0 smaller than safe_range_0, cannot back")
- #sonar1
- if((self.sonar_r1<=self.safe_range_0 and self.sonar_r1>=2) and (x>0)):
- x=0.0
- th=0.2
- self.node.get_logger().warn("sonar1 smaller than safe_range_0, only trun left")
-
- if((self.sonar_r1<=self.safe_range_0 and self.sonar_r1>=2) and (th<0)):
- x=0.0
- th=0.2
- #sonar2
- if((self.sonar_r2<=self.safe_range_0 and self.sonar_r2>=2) and (x>0)):
- x=0.0
- th=0.2
- self.node.get_logger().warn("sonar2 smaller than safe_range_0, only trun left")
-
- #sonar3
- if((self.sonar_r3<=self.safe_range_0 and self.sonar_r3>=2) and (x>0)):
- x=0.0
- th=-0.2
- self.node.get_logger().warn("sonar3 smaller than safe_range_0, only trun left")
-
- if((self.sonar_r3<=self.safe_range_0 and self.sonar_r3>=2) and (th>0)):
- x=0.0
- th=-0.2
- #sonar4
- if((self.sonar_r4<=self.safe_range_0 and self.sonar_r0>=2) and (x<0)):
- x= 0.0
- self.node.get_logger().warn("sonar4 smaller than safe_range_0, cannot back")
-
-
-
- if x == 0:
- # Turn in place
- if th>0.0 and th<0.15:
- th=0.15
- elif th>-0.15 and th<0.0:
- th=-0.15
- right = th * self.wheel_track * self.gear_reduction / 2.0
- left = -right
- elif th == 0:
- # Pure forward/backward motion
- left = right = x
- else:
- if (th>0.0 and th-0.1 and x<0):
- th=self.start_rotation_limit_w
- if (th<0.0 and th >-1.0*self.start_rotation_limit_w) and (x>-0.1 and x<0):
- th=-1.0*self.start_rotation_limit_w
-
-
- left = x - th * self.wheel_track * self.gear_reduction / 2.0
- right = x + th * self.wheel_track * self.gear_reduction / 2.0
-
- self.v_des_left = int(left * self.ticks_per_meter / self.Stm32.PID_RATE)
- self.v_des_right = int(right * self.ticks_per_meter / self.Stm32.PID_RATE)
-
- def volTransPerentage(self, vo):
- if(vo == -1):
- return -1;
- if(vo>4.2*7*1000):
- COUNT = 10*1000
- else:
- COUNT = 7*1000
-
- if(vo >= 4.0*COUNT):
- return 100
- elif(vo >= 3.965*COUNT):
- return 95
- elif(vo >= 3.93*COUNT):
- return 90
- elif(vo >= 3.895*COUNT):
- return 85
- elif(vo >= 3.86*COUNT):
- return 80
- elif(vo >= 3.825*COUNT):
- return 75
- elif(vo >= 3.79*COUNT):
- return 70
- elif(vo >= 3.755*COUNT):
- return 65
- elif (vo >= 3.72*COUNT):
- return 60
- elif (vo >= 3.685*COUNT):
- return 55
- elif (vo >= 3.65*COUNT):
- return 50
- elif (vo >= 3.615*COUNT):
- return 45
- elif (vo >= 3.58*COUNT):
- return 40
- elif (vo >= 3.545*COUNT):
- return 35
- elif (vo >= 3.51*COUNT):
- return 30
- elif (vo >= 3.475*COUNT):
- return 25
- elif (vo >= 3.44*COUNT):
- return 20
- elif (vo >= 3.405*COUNT):
- return 15
- elif (vo >= 3.37*COUNT):
- return 10
- elif (vo >= 3.335*COUNT):
- return 5
- else:
- return 0
-
- def poll(self):
- now = self.node.get_clock().now()
- if now >= self.t_next:
- try:
- stat_, left_enc,right_enc = self.Stm32.get_encoder_counts()
- if self.PubEncoders == True:
- self.lEncoderPub.publish(left_enc)
- self.rEncoderPub.publish(right_enc)
- except:
- self.bad_encoder_count += 1
- self.node.get_logger().error("Encoder exception count: " + str(self.bad_encoder_count))
- return
- try:
- res,vol1,vol2,vol3,vol4,vol5,vol6 = self.Stm32.get_voltage()
- self.lwheel_ele = vol3*4
- self.rwheel_ele = vol4*4
- self.voltage_val = vol5
-
- if self.show_statics == True:
- self.lwheel_ele_pub.publish(self.lwheel_ele)
- self.rwheel_ele_pub.publish(self.rwheel_ele)
- if self.voltage_pub == True:
- self.voltagePub.publish(self.voltage_val)
- self.voltage_str_pub.publish(str(vol1) + "," + str(vol2) + "," + str(vol3) + "," + str(vol4) + "," + str(vol5) + "," + str(vol6))
- self.voltage_percentage_pub.publish(self.volTransPerentage(self.voltage_val))
- except:
- if self.voltage_pub == True:
- self.voltagePub.publish(-1)
- self.voltage_str_pub.publish("")
-
- if self.show_statics == True:
- self.lwheel_ele_pub.publish(-1)
- self.rwheel_ele_pub.publish(-1)
-
- self.node.get_logger().error("get voltage exception")
- try:
- res,ir1,ir2,ir3,ir4,ir5,ir6 = self.Stm32.get_infrareds()
- if self.show_statics == True:
- self.ir0_pub.publish(ir1)
- self.ir1_pub.publish(ir2)
- self.ir2_pub.publish(ir3)
- self.ir3_pub.publish(ir4)
- self.ir4_pub.publish(ir5)
- self.ir5_pub.publish(ir6)
- except:
- if self.show_statics == True:
- self.ir0_pub.publish(-1)
- self.ir1_pub.publish(-1)
- self.ir2_pub.publish(-1)
- self.ir3_pub.publish(-1)
- self.ir4_pub.publish(-1)
- self.ir5_pub.publish(-1)
- self.node.get_logger().error("get infrared ray exception")
- try:
- res,way = self.Stm32.get_recharge_way()
- self.recharge_way = way
- if self.show_statics == True:
- self.recharge_way_pub.publish(self.recharge_way)
- except:
- if self.show_statics == True:
- self.recharge_way_pub.publish(-1)
- self.node.get_logger().error("get recharge way exception")
- try:
- res,status = self.Stm32.get_automatic_recharge_status()
- self.recharge_status = status
- if self.recharge_status == 3:
- self.is_recharge = False
- if self.show_statics == True:
- self.recharge_pub.publish(self.recharge_status)
- except:
- if self.show_statics == True:
- self.recharge_pub.publish(-1)
- self.node.get_logger().error("get recharge status exception")
- if(not self.is_recharge):
- try:
- res,eme_val,rech_val = self.Stm32.get_embtn_recharge()
- self.emergencybt_val = eme_val
- if self.show_statics == True:
- self.emergencybt_pub.publish(eme_val)
- self.recharge_ir_pub.publish(rech_val)
- except:
- if self.show_statics == True:
- self.emergencybt_val = -1
- self.emergencybt_pub.publish(-1)
- self.recharge_ir_pub.publish(-1)
- self.node.get_logger().error("get emergency button exception")
- if (self.useSonar == True) :
- pcloud = PointCloud2()
- try:
- stat_, self.sonar_r0, self.sonar_r1, self.sonar_r2, self.sonar_r3, self.sonar_r4,_ = self.Stm32.get_sonar_range()
- sonar0_range = Range()
- sonar0_range.header.stamp = now
- sonar0_range.header.frame_id = "/sonar0"
- sonar0_range.radiation_type = Range.ULTRASOUND
- sonar0_range.field_of_view = 0.3
- sonar0_range.min_range = 0.04
- sonar0_range.max_range = 0.8
- sonar0_range.range = self.sonar_r0/100.0
-
- if sonar0_range.range == 0.0: #sonar0 error or not exist flag
- sonar0_range.range=1.0
- elif sonar0_range.range>=sonar0_range.max_range:
- sonar0_range.range = sonar0_range.max_range
- self.sonar0_pub.publish(sonar0_range)
- if sonar0_range.range>=0.5 or sonar0_range.range == 0.0:
- self.sonar_cloud[0][0] = self.sonar0_offset_x + self.sonar_maxval * math.cos(self.sonar0_offset_yaw)
- self.sonar_cloud[0][1] = self.sonar0_offset_y + self.sonar_maxval * math.sin(self.sonar0_offset_yaw)
- else:
- self.sonar_cloud[0][0] = self.sonar0_offset_x + sonar0_range.range * math.cos(self.sonar0_offset_yaw)
- self.sonar_cloud[0][1] = self.sonar0_offset_y + sonar0_range.range * math.sin(self.sonar0_offset_yaw)
-
-
- sonar1_range = Range()
- sonar1_range.header.stamp = now
- sonar1_range.header.frame_id = "/sonar1"
- sonar1_range.radiation_type = Range.ULTRASOUND
- sonar1_range.field_of_view = 0.3
- sonar1_range.min_range = 0.04
- sonar1_range.max_range = 0.8
- sonar1_range.range = self.sonar_r1/100.0
- # if sonar1_range.range>=sonar0_range.max_range or sonar1_range.range == 0.0:
- if sonar1_range.range == 0.0: #sonar1 error or not exist flag
- sonar1_range.range=1.0
- elif sonar1_range.range>=sonar0_range.max_range:
- sonar1_range.range = sonar1_range.max_range
- self.sonar1_pub.publish(sonar1_range)
- if sonar1_range.range>=0.5 or sonar1_range.range == 0.0:
- self.sonar_cloud[1][0] = self.sonar1_offset_x + self.sonar_maxval * math.cos(self.sonar1_offset_yaw)
- self.sonar_cloud[1][1] = self.sonar1_offset_y + self.sonar_maxval * math.sin(self.sonar1_offset_yaw)
- else:
- self.sonar_cloud[1][0] = self.sonar1_offset_x + sonar1_range.range * math.cos(self.sonar1_offset_yaw)
- self.sonar_cloud[1][1] = self.sonar1_offset_y + sonar1_range.range * math.sin(self.sonar1_offset_yaw)
-
- sonar2_range = Range()
- sonar2_range.header.stamp = now
- sonar2_range.header.frame_id = "/sonar2"
- sonar2_range.radiation_type = Range.ULTRASOUND
- sonar2_range.field_of_view = 0.3
- sonar2_range.min_range = 0.04
- sonar2_range.max_range = 0.8
- sonar2_range.range = self.sonar_r2/100.0
- #if sonar2_range.range>=sonar2_range.max_range or sonar2_range.range == 0.0:
- if sonar2_range.range == 0.0: #sonar2 error or not exist flag
- sonar2_range.range=1.0
- elif sonar2_range.range>=sonar2_range.max_range :
- sonar2_range.range = sonar2_range.max_range
- self.sonar2_pub.publish(sonar2_range)
- if sonar2_range.range>=0.5 or sonar2_range.range == 0.0:
- self.sonar_cloud[2][0] = self.sonar2_offset_x + self.sonar_maxval * math.cos(self.sonar2_offset_yaw)
- self.sonar_cloud[2][1] = self.sonar2_offset_y + self.sonar_maxval * math.sin(self.sonar2_offset_yaw)
- else:
- self.sonar_cloud[2][0] = self.sonar2_offset_x + sonar2_range.range * math.cos(self.sonar2_offset_yaw)
- self.sonar_cloud[2][1] = self.sonar2_offset_y + sonar2_range.range * math.sin(self.sonar2_offset_yaw)
-
- sonar3_range = Range()
- sonar3_range.header.stamp = now
- sonar3_range.header.frame_id = "/sonar3"
- sonar3_range.radiation_type = Range.ULTRASOUND
- sonar3_range.field_of_view = 0.3
- sonar3_range.min_range = 0.04
- sonar3_range.max_range = 0.8
- sonar3_range.range = self.sonar_r3/100.0
- #if sonar3_range.range>=sonar3_range.max_range or sonar3_range.range == 0.0:
- if sonar3_range.range == 0.0: #sonar3 error or not exist flag
- sonar3_range.range=1.0
- elif sonar3_range.range>=sonar3_range.max_range :
- sonar3_range.range = sonar3_range.max_range
- self.sonar3_pub.publish(sonar3_range)
- if sonar3_range.range>=0.5 or sonar3_range.range == 0.0:
- self.sonar_cloud[3][0] = self.sonar3_offset_x + self.sonar_maxval * math.cos(self.sonar3_offset_yaw)
- self.sonar_cloud[3][1] = self.sonar3_offset_y + self.sonar_maxval * math.sin(self.sonar3_offset_yaw)
- else:
- self.sonar_cloud[3][0] = self.sonar3_offset_x + sonar3_range.range * math.cos(self.sonar3_offset_yaw)
- self.sonar_cloud[3][1] = self.sonar3_offset_y + sonar3_range.range * math.sin(self.sonar3_offset_yaw)
-
- sonar4_range = Range()
- sonar4_range.header.stamp = now
- sonar4_range.header.frame_id = "/sonar4"
- sonar4_range.radiation_type = Range.ULTRASOUND
- sonar4_range.field_of_view = 0.3
- sonar4_range.min_range = 0.04
- sonar4_range.max_range = 0.8
- sonar4_range.range = self.sonar_r4/100.0
- #if sonar4_range.range>=sonar4_range.max_range or sonar4_range.range == 0.0:
- if sonar4_range.range == 0.0: #sonar4 error or not exist flag
- sonar4_range.range=1.0
- elif sonar4_range.range>=sonar4_range.max_range :
- sonar4_range.range = sonar4_range.max_range
- self.sonar4_pub.publish(sonar4_range)
- if sonar4_range.range>=0.5 or sonar4_range.range == 0.0:
- self.sonar_cloud[4][0] = self.sonar4_offset_x + self.sonar_maxval * math.cos(self.sonar4_offset_yaw)
- self.sonar_cloud[4][1] = self.sonar4_offset_y + self.sonar_maxval * math.sin(self.sonar4_offset_yaw)
- else:
- self.sonar_cloud[4][0] = self.sonar4_offset_x + sonar4_range.range * math.cos(self.sonar4_offset_yaw)
- self.sonar_cloud[4][1] = self.sonar4_offset_y + sonar4_range.range * math.sin(self.sonar4_offset_yaw)
-
- pcloud.header.frame_id="/base_footprint"
- pcloud = PointCloud2.create_cloud_xyz32(pcloud.header, self.sonar_cloud)
- self.sonar_pub_cloud.publish(pcloud)
- except:
- self.node.get_logger().error("Get Sonar exception")
- return
- if (self.useImu == True):
- try:
- stat_, yaw, yaw_vel, acc_x, acc_y, acc_z = self.Stm32.get_imu_val()
- if yaw>=18000:
- yaw = yaw-65535
- yaw = yaw/100.0
- if yaw_vel>=32768:
- yaw_vel = yaw_vel-65535
- yaw_vel = yaw_vel/100.0
- imu_data = Imu()
- imu_data.header.stamp = self.node.get_clock().now().to_msg()
- imu_data.header.frame_id = self.imu_frame_id
- imu_data.orientation_covariance[0] = 1000000.0
- imu_data.orientation_covariance[1] = 0.0
- imu_data.orientation_covariance[2] = 0.0
- imu_data.orientation_covariance[3] = 0.0
- imu_data.orientation_covariance[4] = 1000000.0
- imu_data.orientation_covariance[5] = 0.0
- imu_data.orientation_covariance[6] = 0.0
- imu_data.orientation_covariance[7] = 0.0
- imu_data.orientation_covariance[8] = 0.000001
- imu_quaternion = Quaternion()
- imu_quaternion.x = 0.0
- imu_quaternion.y = 0.0
- imu_quaternion.z = sin(-1*self.imu_offset*yaw*3.1416/(180 *2.0))
- imu_quaternion.w = cos(-1*self.imu_offset*yaw*3.1416/(180 *2.0))
- imu_data.orientation = imu_quaternion
- imu_data.linear_acceleration_covariance[0] = -1
- imu_data.angular_velocity_covariance[0] = -1
-
- imu_data.angular_velocity.x = 0.0
- imu_data.angular_velocity.y = 0.0
- imu_data.angular_velocity.z = (yaw_vel*3.1416/(180*100))
- self.imuPub.publish(imu_data)
- messageFl32 = Float32()
- messageFl32.data = -1*self.imu_offset*yaw*3.1416/(180 *2.0)
- self.imuAnglePub.publish(messageFl32)
- except Exception as e:
- self.bad_encoder_count += 1
- self.node.get_logger().error("IMU exception count: " + str(self.bad_encoder_count))
- self.node.get_logger().error(str(e))
- return
-
- dt = now - self.then
- self.then = now
- dt = dt.nanoseconds * 1e-9
- if self.enc_left == None:
- dright = 0
- dleft = 0
- else:
- if (left_enc < self.encoder_low_wrap and self.enc_left > self.encoder_high_wrap) :
- self.l_wheel_mult = self.l_wheel_mult + 1
- elif (left_enc > self.encoder_high_wrap and self.enc_left < self.encoder_low_wrap) :
- self.l_wheel_mult = self.l_wheel_mult - 1
- else:
- self.l_wheel_mult = 0
- if (right_enc < self.encoder_low_wrap and self.enc_right > self.encoder_high_wrap) :
- self.r_wheel_mult = self.r_wheel_mult + 1
- elif (right_enc > self.encoder_high_wrap and self.enc_right < self.encoder_low_wrap) :
- self.r_wheel_mult = self.r_wheel_mult - 1
- else:
- self.r_wheel_mult = 0
-
- dleft = 1.0 * (left_enc + self.l_wheel_mult * (self.encoder_max - self.encoder_min)-self.enc_left) / self.ticks_per_meter
- dright = 1.0 * (right_enc + self.r_wheel_mult * (self.encoder_max - self.encoder_min)-self.enc_right) / self.ticks_per_meter
-
- self.enc_right = right_enc
- self.enc_left = left_enc
-
- dxy_ave = (dright + dleft) / 2.0
- dth = (dright - dleft) / self.wheel_track
- vxy = dxy_ave / dt
- vth = dth / dt
-
- if (dxy_ave != 0):
- dx = cos(dth) * dxy_ave
- dy = -sin(dth) * dxy_ave
- self.x += (cos(self.th) * dx - sin(self.th) * dy)
- self.y += (sin(self.th) * dx + cos(self.th) * dy)
-
- if (dth != 0):
- self.th += dth
-
- quaternion = Quaternion()
- quaternion.x = 0.0
- quaternion.y = 0.0
- quaternion.z = sin(self.th / 2.0)
- quaternion.w = cos(self.th / 2.0)
-
- t = TransformStamped()
- t.header.stamp = self.node.get_clock().now().to_msg()
- t.header.frame_id = self.base_frame
- t.child_frame_id = "odom"
- t.transform.translation.x = float(self.x)
- t.transform.translation.y = float(self.y)
- t.transform.translation.z = 0.0
- t.transform.rotation.x = quaternion.x
- t.transform.rotation.y = quaternion.y
- t.transform.rotation.z = quaternion.z
- t.transform.rotation.w = quaternion.w
-
- if (self.useImu == False) :
- self.odomBroadcaster.sendTransform(t)
-
- odom = Odometry()
- odom.header.frame_id = "odom"
- odom.child_frame_id = self.base_frame
- odom.header.stamp = self.node.get_clock().now().to_msg()
- odom.pose.pose.position.x = float(self.x)
- odom.pose.pose.position.y = float(self.y)
- odom.pose.pose.position.z = 0.0
- odom.pose.pose.orientation = quaternion
- odom.twist.twist.linear.x = vxy
- odom.twist.twist.linear.y = 0.0
- odom.twist.twist.angular.z = vth
- odom.pose.covariance = ODOM_POSE_COVARIANCE
- odom.twist.covariance = ODOM_TWIST_COVARIANCE
- self.odomPub.publish(odom)
- if now > (self.last_cmd_vel + Duration(seconds=self.timeout)):
- self.v_des_left = 0
- self.v_des_right = 0
-
- if self.v_left < self.v_des_left:
- self.v_left += self.max_accel
- if self.v_left > self.v_des_left:
- self.v_left = self.v_des_left
- else:
- self.v_left -= self.max_accel
- if self.v_left < self.v_des_left:
- self.v_left = self.v_des_left
-
- if self.v_right < self.v_des_right:
- self.v_right += self.max_accel
- if self.v_right > self.v_des_right:
- self.v_right = self.v_des_right
- else:
- self.v_right -= self.max_accel
- if self.v_right < self.v_des_right:
- self.v_right = self.v_des_right
- if self.PubEncoders == True:
- self.lVelPub.publish(self.v_left)
- self.rVelPub.publish(self.v_right)
- if ((not self.stopped) and (not self.is_recharge)):
- self.Stm32.drive(self.v_left, self.v_right)
-
- self.t_next = now + self.t_delta
-
-class Stm32ROS(Node):
-
- def __init__(self):
- super().__init__('DashgoDriver')
- #Declare all parameters
- self.declare_parameter("port", "/dev/ttyUSB0")
- self.declare_parameter("baudrate", 115200)
- self.declare_parameter("timeout", 0.5)
- self.declare_parameter("base_frame", 'base_footprint')
- self.declare_parameter("rate", 50)
- self.declare_parameter("sensorstate_rate", 10)
- self.declare_parameter("use_base_controller", True)
- self.declare_parameter("stm32_timeout", 3)
- ##Base controller parameters
- self.declare_parameter("base_controller_rate", 10) #self.rate = float(rospy.get_param("~base_controller_rate", 10))
- self.declare_parameter("base_controller_timeout", 1.0)
- self.declare_parameter("useImu", False)
- self.declare_parameter("useSonar", False)
- self.declare_parameter("wheel_diameter", 0.1518)
- self.declare_parameter("wheel_track", 0.375)
- self.declare_parameter("encoder_resolution", 42760)
- self.declare_parameter("gear_reduction", 1.0)
- self.declare_parameter("accel_limit", 1.0)
- self.declare_parameter("motors_reversed", False)
- self.declare_parameter("start_rotation_limit_w", 0.4)
- self.declare_parameter("encoder_min", 0)
- self.declare_parameter("encoder_max", 65535)
- self.declare_parameter("use_smotheer",False)
- self.declare_parameter("sonar_height",0.15)
- self.declare_parameter("sonar0_offset_yaw",0.0)
- self.declare_parameter("sonar0_offset_x",0.27)
- self.declare_parameter("sonar0_offset_y",0.19)
- self.declare_parameter("sonar1_offset_yaw",0.0)
- self.declare_parameter("sonar1_offset_x",0.27)
- self.declare_parameter("sonar1_offset_y",-0.19)
- self.declare_parameter("sonar2_offset_yaw",1.57)
- self.declare_parameter("sonar2_offset_x",0.24)
- self.declare_parameter("sonar2_offset_y",0.15)
- self.declare_parameter("sonar3_offset_yaw",-1.57)
- self.declare_parameter("sonar3_offset_x",0.24)
- self.declare_parameter("sonar3_offset_y",-0.15)
- self.declare_parameter("sonar4_offset_yaw",3.14)
- self.declare_parameter("sonar4_offset_x",-0.1)
- self.declare_parameter("sonar4_offset_y",0.0)
- self.declare_parameter("imu_frame_id",'imu_base')
- self.declare_parameter("imu_offset",1.01)
- self.declare_parameter("pub_encoders",False)
- self.declare_parameter("voltage_pub",False)
- self.declare_parameter("show_statics",False)
- self.encoder_min = self.get_parameter('encoder_min').get_parameter_value().integer_value
- self.encoder_max = self.get_parameter('encoder_max').get_parameter_value().integer_value
- self.declare_parameter('wheel_low_wrap', (self.encoder_max - self.encoder_min) * 0.3 + self.encoder_min )
- self.declare_parameter('wheel_high_wrap', (self.encoder_max - self.encoder_min) * 0.3 + self.encoder_min )
-
- #Initial Setup
-
- #Rate reference
- r = self.get_parameter('rate').get_parameter_value().integer_value
- hz = 1.0/r
- self.timer = self.create_timer(hz, self.timer_callback)
-
- #Sensor Rate reference
- self.sensorstate_rate = self.get_parameter('sensorstate_rate').get_parameter_value().integer_value
- now = self.get_clock().now()
- t_rate = (1.0 / self.sensorstate_rate)
- self.t_delta_sensors = Duration(seconds=t_rate)
- self.t_next_sensors = now + self.t_delta_sensors
-
- #Starting STM32 instance
- self.port = self.get_parameter('port').get_parameter_value().string_value
- self.baud = self.get_parameter('baudrate').get_parameter_value().integer_value
- self.timeout = self.get_parameter('timeout').get_parameter_value().double_value
- self.stm32_timeout = self.get_parameter('stm32_timeout').get_parameter_value().integer_value
- self.controller = Stm32(self,self.port, self.baud, self.timeout, self.stm32_timeout)
-
- #Start STM32 connection
- self.controller.connect()
- self.get_logger().info("Connected to Stm32 on port " + self.port + " at " + str(self.baud) + " baud")
-
- #Start base controller
- self.use_base_controller = self.get_parameter('use_base_controller').get_parameter_value().bool_value
-
- if self.use_base_controller:
- self.base_controller_rate = self.get_parameter('base_controller_rate').get_parameter_value().integer_value
- self.useImu = self.get_parameter('useImu').get_parameter_value().bool_value
- self.useSonar = self.get_parameter('useSonar').get_parameter_value().bool_value
- self.wheel_diameter = self.get_parameter('wheel_diameter').get_parameter_value().double_value
- self.wheel_track = self.get_parameter('wheel_track').get_parameter_value().double_value
- self.encoder_resolution = self.get_parameter('encoder_resolution').get_parameter_value().integer_value
- self.gear_reduction = self.get_parameter('gear_reduction').get_parameter_value().double_value
- self.accel_limit = self.get_parameter('accel_limit').get_parameter_value().double_value
- self.motors_reversed = self.get_parameter('motors_reversed').get_parameter_value().bool_value
- self.start_rotation_limit_w = self.get_parameter('start_rotation_limit_w').get_parameter_value().double_value
- self.use_smotheer = self.get_parameter('use_smotheer').get_parameter_value().bool_value
- self.PubEncoders = self.get_parameter('pub_encoders').get_parameter_value().bool_value
- self.voltage_pub = self.get_parameter('voltage_pub').get_parameter_value().bool_value
- self.show_statics = self.get_parameter('show_statics').get_parameter_value().bool_value
- self.encoder_min = self.get_parameter('encoder_min').get_parameter_value().integer_value
- self.encoder_max = self.get_parameter('encoder_max').get_parameter_value().integer_value
- self.sonar_height = self.get_parameter('sonar_height').get_parameter_value().double_value
- self.sonar0_offset_yaw = self.get_parameter('sonar0_offset_yaw').get_parameter_value().double_value
- self.sonar0_offset_x = self.get_parameter('sonar0_offset_x').get_parameter_value().double_value
- self.sonar0_offset_y = self.get_parameter('sonar0_offset_y').get_parameter_value().double_value
- self.sonar1_offset_yaw = self.get_parameter('sonar1_offset_yaw').get_parameter_value().double_value
- self.sonar1_offset_x = self.get_parameter('sonar1_offset_x').get_parameter_value().double_value
- self.sonar1_offset_y = self.get_parameter('sonar1_offset_y').get_parameter_value().double_value
- self.sonar2_offset_yaw = self.get_parameter('sonar2_offset_yaw').get_parameter_value().double_value
- self.sonar2_offset_x = self.get_parameter('sonar2_offset_x').get_parameter_value().double_value
- self.sonar2_offset_y = self.get_parameter('sonar2_offset_y').get_parameter_value().double_value
- self.sonar3_offset_yaw = self.get_parameter('sonar3_offset_yaw').get_parameter_value().double_value
- self.sonar3_offset_x = self.get_parameter('sonar3_offset_x').get_parameter_value().double_value
- self.sonar3_offset_y = self.get_parameter('sonar3_offset_y').get_parameter_value().double_value
- self.sonar4_offset_yaw = self.get_parameter('sonar4_offset_yaw').get_parameter_value().double_value
- self.sonar4_offset_x = self.get_parameter('sonar4_offset_x').get_parameter_value().double_value
- self.sonar4_offset_y = self.get_parameter('sonar4_offset_y').get_parameter_value().double_value
- self.imu_frame_id = self.get_parameter('imu_frame_id').get_parameter_value().string_value
- self.imu_offset = self.get_parameter('imu_offset').get_parameter_value().double_value
- self.base_frame = self.get_parameter('base_frame').get_parameter_value().string_value
- self.base_controller_timeout = self.get_parameter('base_controller_timeout').get_parameter_value().double_value
- self.encoder_low_wrap = self.get_parameter('wheel_low_wrap').get_parameter_value().double_value
- self.encoder_high_wrap = self.get_parameter('wheel_high_wrap').get_parameter_value().double_value
-
- self.myBaseController = BaseController(
- self,self.base_controller_rate,self.controller,self.base_frame,self.wheel_diameter,
- self.wheel_track,self.encoder_resolution,self.gear_reduction,self.accel_limit,
- self.motors_reversed,self.start_rotation_limit_w,self.use_smotheer,self.useImu,
- self.useSonar,self.encoder_min, self.encoder_max, self.sonar_height,
- self.sonar0_offset_yaw,self.sonar0_offset_x,self.sonar0_offset_y, self.sonar1_offset_yaw,
- self.sonar1_offset_x,self.sonar1_offset_y,self.sonar2_offset_yaw,self.sonar2_offset_x,
- self.sonar2_offset_y,self.sonar3_offset_yaw,self.sonar3_offset_x,self.sonar3_offset_y,
- self.sonar4_offset_yaw,self.sonar4_offset_x,self.sonar4_offset_y,self.imu_frame_id,
- self.imu_offset,self.PubEncoders,self.voltage_pub,self.show_statics,self.base_controller_timeout,
- self.encoder_high_wrap ,self.encoder_low_wrap
- )
-
-
-
-
- def timer_callback(self):
- if(self.use_base_controller):
- self.myBaseController.poll()
- else:
- self.get_logger().info("Base controller is not enabled")
-
-
-
-def main():
- rclpy.init()
-
- Stm32Node = Stm32ROS()
-
- try:
- rclpy.spin(Stm32Node)
- except KeyboardInterrupt:
- pass
-
- rclpy.shutdown()
-
-
-if __name__ == '__main__':
- main()
\ No newline at end of file
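
The node removed above follows the standard rclpy pattern of declaring every parameter up front, reading the typed value back, and then wiring up a polling timer. A minimal sketch of that pattern, with only a couple of the parameters shown (illustrative node and parameter names, not a replacement for the deleted driver):

    import rclpy
    from rclpy.node import Node


    class ParamDemo(Node):
        """Sketch of the declare/read parameter idiom used by the removed driver node."""

        def __init__(self):
            super().__init__("param_demo")
            # Declare with defaults, then read the typed values back.
            self.declare_parameter("base_controller_rate", 10)
            self.declare_parameter("wheel_diameter", 0.1518)
            rate = self.get_parameter("base_controller_rate").get_parameter_value().integer_value
            self.wheel_diameter = self.get_parameter("wheel_diameter").get_parameter_value().double_value
            # create_timer() takes a period in seconds, i.e. 1 / rate.
            self.timer = self.create_timer(1.0 / rate, self.poll)

        def poll(self):
            self.get_logger().info(f"polling, wheel_diameter={self.wheel_diameter}")


    def main():
        rclpy.init()
        node = ParamDemo()
        try:
            rclpy.spin(node)
        except KeyboardInterrupt:
            pass
        rclpy.shutdown()
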
diff --git a/navigation/packages/nav_main/CMakeLists.txt b/navigation/packages/nav_main/CMakeLists.txt
index 08ce8c3a..d6bcb92c 100644
--- a/navigation/packages/nav_main/CMakeLists.txt
+++ b/navigation/packages/nav_main/CMakeLists.txt
@@ -13,7 +13,7 @@ find_package(rclpy REQUIRED)
find_package(sensor_msgs REQUIRED)
find_package(frida_interfaces REQUIRED)
-install(DIRECTORY launch
+install(DIRECTORY launch config maps
DESTINATION share/${PROJECT_NAME})
# install the json
diff --git a/navigation/packages/nav_main/config/mapper_params_online_async.yaml b/navigation/packages/nav_main/config/mapper_params_online_async.yaml
new file mode 100644
index 00000000..2398f014
--- /dev/null
+++ b/navigation/packages/nav_main/config/mapper_params_online_async.yaml
@@ -0,0 +1,75 @@
+slam_toolbox:
+ ros__parameters:
+
+ # Plugin params
+ solver_plugin: solver_plugins::CeresSolver
+ ceres_linear_solver: SPARSE_NORMAL_CHOLESKY
+ ceres_preconditioner: SCHUR_JACOBI
+ ceres_trust_strategy: LEVENBERG_MARQUARDT
+ ceres_dogleg_type: TRADITIONAL_DOGLEG
+ ceres_loss_function: None
+
+ # ROS Parameters
+ odom_frame: odom
+ map_frame: map
+ base_frame: base_footprint
+ scan_topic: /scan
+ use_map_saver: true
+ mode: mapping #localization #mapping
+
+ # if you'd like to immediately start continuing a map at a given pose
+ # or at the dock, but they are mutually exclusive, if pose is given
+ # will use pose
+ #map_file_name: /home/gerardo/Roborregos/Repos/home_repo/first_map_serial
+ # map_start_pose: [0.0, 0.0, 0.0]
+ #map_start_at_dock: true
+
+ debug_logging: false
+ throttle_scans: 1
+ transform_publish_period: 0.02 #if 0 never publishes odometry
+ map_update_interval: 5.0
+ resolution: 0.05
+ min_laser_range: 0.0 #for rastering images
+ max_laser_range: 20.0 #for rastering images
+ minimum_time_interval: 0.5
+ transform_timeout: 0.2
+ tf_buffer_duration: 30.
+ stack_size_to_use: 40000000 # program needs a larger stack size to serialize large maps
+ enable_interactive_mode: true
+
+ # General Parameters
+ use_scan_matching: true
+ use_scan_barycenter: true
+ minimum_travel_distance: 0.5
+ minimum_travel_heading: 0.5
+ scan_buffer_size: 10
+ scan_buffer_maximum_scan_distance: 10.0
+ link_match_minimum_response_fine: 0.1
+ link_scan_maximum_distance: 1.5
+ loop_search_maximum_distance: 3.0
+ do_loop_closing: true
+ loop_match_minimum_chain_size: 10
+ loop_match_maximum_variance_coarse: 3.0
+ loop_match_minimum_response_coarse: 0.35
+ loop_match_minimum_response_fine: 0.45
+
+ # Correlation Parameters - Correlation Parameters
+ correlation_search_space_dimension: 0.5
+ correlation_search_space_resolution: 0.01
+ correlation_search_space_smear_deviation: 0.1
+
+ # Correlation Parameters - Loop Closure Parameters
+ loop_search_space_dimension: 8.0
+ loop_search_space_resolution: 0.05
+ loop_search_space_smear_deviation: 0.03
+
+ # Scan Matcher Parameters
+ distance_variance_penalty: 0.5
+ angle_variance_penalty: 1.0
+
+ fine_search_angle_offset: 0.00349
+ coarse_search_angle_offset: 0.349
+ coarse_angle_resolution: 0.0349
+ minimum_angle_penalty: 0.9
+ minimum_distance_penalty: 0.5
+ use_response_expansion: true
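
These slam_toolbox parameters are consumed by nav_mapping.launch.py later in this diff. Once a map has been built, it can be written to disk through slam_toolbox's map-saving service; the sketch below assumes the default /slam_toolbox/save_map service name and the slam_toolbox/srv/SaveMap type (check both against the slam_toolbox version in use):

    import rclpy
    from rclpy.node import Node
    from std_msgs.msg import String
    from slam_toolbox.srv import SaveMap  # assumed service type shipped with slam_toolbox


    def save_map(map_name: str = "Lab01") -> None:
        """Ask slam_toolbox to save the current map as <map_name>.pgm/.yaml."""
        rclpy.init()
        node = Node("map_saver_client")
        client = node.create_client(SaveMap, "/slam_toolbox/save_map")
        if client.wait_for_service(timeout_sec=5.0):
            request = SaveMap.Request()
            request.name = String(data=map_name)  # the request's name field is a std_msgs/String
            future = client.call_async(request)
            rclpy.spin_until_future_complete(node, future)
            node.get_logger().info(f"save_map returned: {future.result()}")
        else:
            node.get_logger().error("save_map service not available")
        node.destroy_node()
        rclpy.shutdown()


    if __name__ == "__main__":
        save_map()
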
diff --git a/navigation/packages/nav_main/config/nav2_params.yaml b/navigation/packages/nav_main/config/nav2_params.yaml
new file mode 100644
index 00000000..81dfc981
--- /dev/null
+++ b/navigation/packages/nav_main/config/nav2_params.yaml
@@ -0,0 +1,350 @@
+amcl:
+ ros__parameters:
+ use_sim_time: True
+ alpha1: 0.2
+ alpha2: 0.2
+ alpha3: 0.2
+ alpha4: 0.2
+ alpha5: 0.2
+ base_frame_id: "base_footprint"
+ beam_skip_distance: 0.5
+ beam_skip_error_threshold: 0.9
+ beam_skip_threshold: 0.3
+ do_beamskip: false
+ global_frame_id: "map"
+ lambda_short: 0.1
+ laser_likelihood_max_dist: 2.0
+ laser_max_range: 100.0
+ laser_min_range: -1.0
+ laser_model_type: "likelihood_field"
+ max_beams: 60
+ max_particles: 2000
+ min_particles: 500
+ odom_frame_id: "odom"
+ pf_err: 0.05
+ pf_z: 0.99
+ recovery_alpha_fast: 0.0
+ recovery_alpha_slow: 0.0
+ resample_interval: 1
+ robot_model_type: "nav2_amcl::DifferentialMotionModel"
+ save_pose_rate: 0.5
+ sigma_hit: 0.2
+ tf_broadcast: true
+ transform_tolerance: 1.0
+ update_min_a: 0.2
+ update_min_d: 0.25
+ z_hit: 0.5
+ z_max: 0.05
+ z_rand: 0.5
+ z_short: 0.05
+ scan_topic: scan
+
+bt_navigator:
+ ros__parameters:
+ use_sim_time: True
+ global_frame: map
+ robot_base_frame: base_link
+ odom_topic: /odom
+ bt_loop_duration: 10
+ default_server_timeout: 20
+ wait_for_service_timeout: 1000
+ # 'default_nav_through_poses_bt_xml' and 'default_nav_to_pose_bt_xml' use these defaults:
+ # nav2_bt_navigator/navigate_to_pose_w_replanning_and_recovery.xml
+ # nav2_bt_navigator/navigate_through_poses_w_replanning_and_recovery.xml
+ # They can be set here or via a RewrittenYaml remap from a parent launch file to Nav2.
+ plugin_lib_names:
+ - nav2_compute_path_to_pose_action_bt_node
+ - nav2_compute_path_through_poses_action_bt_node
+ - nav2_smooth_path_action_bt_node
+ - nav2_follow_path_action_bt_node
+ - nav2_spin_action_bt_node
+ - nav2_wait_action_bt_node
+ - nav2_assisted_teleop_action_bt_node
+ - nav2_back_up_action_bt_node
+ - nav2_drive_on_heading_bt_node
+ - nav2_clear_costmap_service_bt_node
+ - nav2_is_stuck_condition_bt_node
+ - nav2_goal_reached_condition_bt_node
+ - nav2_goal_updated_condition_bt_node
+ - nav2_globally_updated_goal_condition_bt_node
+ - nav2_is_path_valid_condition_bt_node
+ - nav2_initial_pose_received_condition_bt_node
+ - nav2_reinitialize_global_localization_service_bt_node
+ - nav2_rate_controller_bt_node
+ - nav2_distance_controller_bt_node
+ - nav2_speed_controller_bt_node
+ - nav2_truncate_path_action_bt_node
+ - nav2_truncate_path_local_action_bt_node
+ - nav2_goal_updater_node_bt_node
+ - nav2_recovery_node_bt_node
+ - nav2_pipeline_sequence_bt_node
+ - nav2_round_robin_node_bt_node
+ - nav2_transform_available_condition_bt_node
+ - nav2_time_expired_condition_bt_node
+ - nav2_path_expiring_timer_condition
+ - nav2_distance_traveled_condition_bt_node
+ - nav2_single_trigger_bt_node
+ - nav2_goal_updated_controller_bt_node
+ - nav2_is_battery_low_condition_bt_node
+ - nav2_navigate_through_poses_action_bt_node
+ - nav2_navigate_to_pose_action_bt_node
+ - nav2_remove_passed_goals_action_bt_node
+ - nav2_planner_selector_bt_node
+ - nav2_controller_selector_bt_node
+ - nav2_goal_checker_selector_bt_node
+ - nav2_controller_cancel_bt_node
+ - nav2_path_longer_on_approach_bt_node
+ - nav2_wait_cancel_bt_node
+ - nav2_spin_cancel_bt_node
+ - nav2_back_up_cancel_bt_node
+ - nav2_assisted_teleop_cancel_bt_node
+ - nav2_drive_on_heading_cancel_bt_node
+ - nav2_is_battery_charging_condition_bt_node
+
+bt_navigator_navigate_through_poses_rclcpp_node:
+ ros__parameters:
+ use_sim_time: True
+
+bt_navigator_navigate_to_pose_rclcpp_node:
+ ros__parameters:
+ use_sim_time: True
+
+controller_server:
+ ros__parameters:
+ use_sim_time: True
+ controller_frequency: 20.0
+ min_x_velocity_threshold: 0.001
+ min_y_velocity_threshold: 0.5
+ min_theta_velocity_threshold: 0.001
+ failure_tolerance: 0.3
+ progress_checker_plugin: "progress_checker"
+ goal_checker_plugins: ["general_goal_checker"] # "precise_goal_checker"
+ controller_plugins: ["FollowPath"]
+
+ # Progress checker parameters
+ progress_checker:
+ plugin: "nav2_controller::SimpleProgressChecker"
+ required_movement_radius: 0.5
+ movement_time_allowance: 10.0
+ # Goal checker parameters
+ #precise_goal_checker:
+ # plugin: "nav2_controller::SimpleGoalChecker"
+ # xy_goal_tolerance: 0.25
+ # yaw_goal_tolerance: 0.25
+ # stateful: True
+ general_goal_checker:
+ stateful: True
+ plugin: "nav2_controller::SimpleGoalChecker"
+ xy_goal_tolerance: 0.25
+ yaw_goal_tolerance: 0.25
+ # DWB parameters
+ FollowPath:
+ plugin: "dwb_core::DWBLocalPlanner"
+ debug_trajectory_details: True
+ min_vel_x: 0.0
+ min_vel_y: 0.0
+ max_vel_x: 0.26
+ max_vel_y: 0.0
+ max_vel_theta: 1.0
+ min_speed_xy: 0.0
+ max_speed_xy: 0.26
+ min_speed_theta: 0.0
+ # Add high threshold velocity for turtlebot 3 issue.
+ # https://github.com/ROBOTIS-GIT/turtlebot3_simulations/issues/75
+ acc_lim_x: 2.5
+ acc_lim_y: 0.0
+ acc_lim_theta: 3.2
+ decel_lim_x: -2.5
+ decel_lim_y: 0.0
+ decel_lim_theta: -3.2
+ vx_samples: 20
+ vy_samples: 5
+ vtheta_samples: 20
+ sim_time: 1.7
+ linear_granularity: 0.05
+ angular_granularity: 0.025
+ transform_tolerance: 0.2
+ xy_goal_tolerance: 0.25
+ trans_stopped_velocity: 0.25
+ short_circuit_trajectory_evaluation: True
+ stateful: True
+ critics: ["RotateToGoal", "Oscillation", "BaseObstacle", "GoalAlign", "PathAlign", "PathDist", "GoalDist"]
+ BaseObstacle.scale: 0.02
+ PathAlign.scale: 32.0
+ PathAlign.forward_point_distance: 0.1
+ GoalAlign.scale: 24.0
+ GoalAlign.forward_point_distance: 0.1
+ PathDist.scale: 32.0
+ GoalDist.scale: 24.0
+ RotateToGoal.scale: 32.0
+ RotateToGoal.slowing_factor: 5.0
+ RotateToGoal.lookahead_time: -1.0
+
+local_costmap:
+ local_costmap:
+ ros__parameters:
+ update_frequency: 5.0
+ publish_frequency: 2.0
+ global_frame: odom
+ robot_base_frame: base_link
+ use_sim_time: True
+ rolling_window: true
+ width: 3
+ height: 3
+ resolution: 0.05
+ robot_radius: 0.22
+ plugins: ["voxel_layer", "inflation_layer"]
+ inflation_layer:
+ plugin: "nav2_costmap_2d::InflationLayer"
+ cost_scaling_factor: 3.0
+ inflation_radius: 0.55
+ voxel_layer:
+ plugin: "nav2_costmap_2d::VoxelLayer"
+ enabled: True
+ publish_voxel_map: True
+ origin_z: 0.0
+ z_resolution: 0.05
+ z_voxels: 16
+ max_obstacle_height: 2.0
+ mark_threshold: 0
+ observation_sources: scan
+ scan:
+ topic: /scan
+ max_obstacle_height: 2.0
+ clearing: True
+ marking: True
+ data_type: "LaserScan"
+ raytrace_max_range: 3.0
+ raytrace_min_range: 0.0
+ obstacle_max_range: 2.5
+ obstacle_min_range: 0.0
+ static_layer:
+ plugin: "nav2_costmap_2d::StaticLayer"
+ map_subscribe_transient_local: True
+ always_send_full_costmap: True
+
+global_costmap:
+ global_costmap:
+ ros__parameters:
+ update_frequency: 1.0
+ publish_frequency: 1.0
+ global_frame: map
+ robot_base_frame: base_link
+ use_sim_time: True
+ robot_radius: 0.22
+ resolution: 0.05
+ track_unknown_space: true
+ plugins: ["static_layer", "obstacle_layer", "inflation_layer"]
+ obstacle_layer:
+ plugin: "nav2_costmap_2d::ObstacleLayer"
+ enabled: True
+ observation_sources: scan
+ scan:
+ topic: /scan
+ max_obstacle_height: 2.0
+ clearing: True
+ marking: True
+ data_type: "LaserScan"
+ raytrace_max_range: 3.0
+ raytrace_min_range: 0.0
+ obstacle_max_range: 2.5
+ obstacle_min_range: 0.0
+ static_layer:
+ plugin: "nav2_costmap_2d::StaticLayer"
+ map_subscribe_transient_local: True
+ inflation_layer:
+ plugin: "nav2_costmap_2d::InflationLayer"
+ cost_scaling_factor: 3.0
+ inflation_radius: 0.55
+ always_send_full_costmap: True
+
+map_server:
+ ros__parameters:
+ use_sim_time: True
+ # Overridden in launch by the "map" launch configuration or provided default value.
+ # To use in yaml, remove the default "map" value in the tb3_simulation_launch.py file & provide full path to map below.
+ yaml_filename: ""
+
+map_saver:
+ ros__parameters:
+ use_sim_time: True
+ save_map_timeout: 5.0
+ free_thresh_default: 0.25
+ occupied_thresh_default: 0.65
+ map_subscribe_transient_local: True
+
+planner_server:
+ ros__parameters:
+ expected_planner_frequency: 20.0
+ use_sim_time: True
+ planner_plugins: ["GridBased"]
+ GridBased:
+ plugin: "nav2_navfn_planner/NavfnPlanner"
+ tolerance: 0.5
+ use_astar: false
+ allow_unknown: true
+
+smoother_server:
+ ros__parameters:
+ use_sim_time: True
+ smoother_plugins: ["simple_smoother"]
+ simple_smoother:
+ plugin: "nav2_smoother::SimpleSmoother"
+ tolerance: 1.0e-10
+ max_its: 1000
+ do_refinement: True
+
+behavior_server:
+ ros__parameters:
+ costmap_topic: local_costmap/costmap_raw
+ footprint_topic: local_costmap/published_footprint
+ cycle_frequency: 10.0
+ behavior_plugins: ["spin", "backup", "drive_on_heading", "assisted_teleop", "wait"]
+ spin:
+ plugin: "nav2_behaviors/Spin"
+ backup:
+ plugin: "nav2_behaviors/BackUp"
+ drive_on_heading:
+ plugin: "nav2_behaviors/DriveOnHeading"
+ wait:
+ plugin: "nav2_behaviors/Wait"
+ assisted_teleop:
+ plugin: "nav2_behaviors/AssistedTeleop"
+ global_frame: odom
+ robot_base_frame: base_link
+ transform_tolerance: 0.1
+ use_sim_time: true
+ simulate_ahead_time: 2.0
+ max_rotational_vel: 1.0
+ min_rotational_vel: 0.4
+ rotational_acc_lim: 3.2
+
+robot_state_publisher:
+ ros__parameters:
+ use_sim_time: True
+
+waypoint_follower:
+ ros__parameters:
+ use_sim_time: True
+ loop_rate: 20
+ stop_on_failure: false
+ waypoint_task_executor_plugin: "wait_at_waypoint"
+ wait_at_waypoint:
+ plugin: "nav2_waypoint_follower::WaitAtWaypoint"
+ enabled: True
+ waypoint_pause_duration: 200
+
+velocity_smoother:
+ ros__parameters:
+ use_sim_time: True
+ smoothing_frequency: 20.0
+ scale_velocities: False
+ feedback: "OPEN_LOOP"
+ max_velocity: [0.26, 0.0, 1.0]
+ min_velocity: [-0.26, 0.0, -1.0]
+ max_accel: [2.5, 0.0, 3.2]
+ max_decel: [-2.5, 0.0, -3.2]
+ odom_topic: "odom"
+ odom_duration: 0.1
+ deadband_velocity: [0.0, 0.0, 0.0]
+ velocity_timeout: 1.0
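
The DWB limits under controller_server and the velocity_smoother limits above are intended to match (max_vel_x and max_velocity[0], acc_lim_x and max_accel[0], and so on); drift between the two is a common source of jerky motion. A small, illustrative consistency check using plain PyYAML (the file path is this config's repository path and may need adjusting):

    import yaml

    PARAMS = "navigation/packages/nav_main/config/nav2_params.yaml"

    with open(PARAMS) as f:
        params = yaml.safe_load(f)

    dwb = params["controller_server"]["ros__parameters"]["FollowPath"]
    smoother = params["velocity_smoother"]["ros__parameters"]

    checks = {
        "max x velocity": (dwb["max_vel_x"], smoother["max_velocity"][0]),
        "max theta velocity": (dwb["max_vel_theta"], smoother["max_velocity"][2]),
        "max x acceleration": (dwb["acc_lim_x"], smoother["max_accel"][0]),
    }
    for name, (controller_value, smoother_value) in checks.items():
        status = "OK" if controller_value == smoother_value else "MISMATCH"
        print(f"{name}: controller={controller_value}, smoother={smoother_value} [{status}]")
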
diff --git a/navigation/packages/nav_main/launch/dualshock_cmd_vel.launch.py b/navigation/packages/nav_main/launch/dualshock_cmd_vel.launch.py
new file mode 100644
index 00000000..461829cf
--- /dev/null
+++ b/navigation/packages/nav_main/launch/dualshock_cmd_vel.launch.py
@@ -0,0 +1,51 @@
+
+from launch import LaunchDescription
+from launch.actions import DeclareLaunchArgument
+from launch.substitutions import LaunchConfiguration
+from launch.substitutions import TextSubstitution
+from launch_ros.actions import ComposableNodeContainer, Node
+from launch_ros.descriptions import ComposableNode
+
+
+def generate_launch_description():
+ """Generate launch description."""
+ hw_type_arg = DeclareLaunchArgument(
+ 'hw_type', default_value=TextSubstitution(text='DualShock4'))
+ topic_name_arg = DeclareLaunchArgument(
+ 'topic_name', default_value=TextSubstitution(text='/cmd_vel'))
+
+ joy_container = ComposableNodeContainer(
+ name='joy_container',
+ package='rclcpp_components',
+ executable='component_container',
+ namespace='',
+ composable_node_descriptions=[
+ ComposableNode(
+ package='joy',
+ plugin='joy::Joy',
+ name='joy',
+ namespace='',
+ ),
+ ComposableNode(
+ package='p9n_node',
+ plugin='p9n_node::TeleopTwistJoyNode',
+ name='teleop_twist_joy_node',
+ namespace='',
+ parameters=[{
+ 'hw_type': LaunchConfiguration('hw_type')
+ }],
+ remappings=[
+ ('cmd_vel', LaunchConfiguration('topic_name'))
+ ],
+ )
+ ],
+ )
+
+
+ ld = LaunchDescription()
+
+ ld.add_action(hw_type_arg)
+ ld.add_action(topic_name_arg)
+ ld.add_action(joy_container)
+
+ return ld
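
A hedged sketch of reusing the teleop launch file above from another Python launch file, overriding its two declared arguments (the 'DualSense' value is an assumption about the controller types p9n_node accepts; '/cmd_vel_joy' is just an example topic):

    from launch import LaunchDescription
    from launch.actions import IncludeLaunchDescription
    from launch.launch_description_sources import PythonLaunchDescriptionSource
    from launch.substitutions import PathJoinSubstitution
    from launch_ros.substitutions import FindPackageShare


    def generate_launch_description():
        teleop = IncludeLaunchDescription(
            PythonLaunchDescriptionSource(
                PathJoinSubstitution([
                    FindPackageShare("nav_main"),
                    "launch",
                    "dualshock_cmd_vel.launch.py",
                ])
            ),
            # hw_type and topic_name are the arguments declared in the included file.
            launch_arguments={
                "hw_type": "DualSense",
                "topic_name": "/cmd_vel_joy",
            }.items(),
        )
        return LaunchDescription([teleop])
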
diff --git a/navigation/packages/nav_main/launch/localization_launch.py b/navigation/packages/nav_main/launch/localization_launch.py
new file mode 100644
index 00000000..4d36bbb0
--- /dev/null
+++ b/navigation/packages/nav_main/launch/localization_launch.py
@@ -0,0 +1,192 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from ament_index_python.packages import get_package_share_directory
+
+from launch import LaunchDescription
+from launch.actions import DeclareLaunchArgument, GroupAction, SetEnvironmentVariable
+from launch.conditions import IfCondition
+from launch.substitutions import LaunchConfiguration, PythonExpression
+from launch_ros.actions import LoadComposableNodes
+from launch_ros.actions import Node
+from launch_ros.descriptions import ComposableNode, ParameterFile
+from nav2_common.launch import RewrittenYaml
+
+
+def generate_launch_description():
+ # Get the launch directory
+ bringup_dir = get_package_share_directory('nav_main')
+
+ namespace = LaunchConfiguration('namespace')
+ map_yaml_file = LaunchConfiguration('map')
+ use_sim_time = LaunchConfiguration('use_sim_time')
+ autostart = LaunchConfiguration('autostart')
+ params_file = LaunchConfiguration('params_file')
+ use_composition = LaunchConfiguration('use_composition')
+ container_name = LaunchConfiguration('container_name')
+ container_name_full = (namespace, '/', container_name)
+ use_respawn = LaunchConfiguration('use_respawn')
+ log_level = LaunchConfiguration('log_level')
+
+ lifecycle_nodes = ['map_server', 'amcl']
+
+ # Map fully qualified names to relative ones so the node's namespace can be prepended.
+ # In case of the transforms (tf), currently, there doesn't seem to be a better alternative
+ # https://github.com/ros/geometry2/issues/32
+ # https://github.com/ros/robot_state_publisher/pull/30
+ # TODO(orduno) Substitute with `PushNodeRemapping`
+ # https://github.com/ros2/launch_ros/issues/56
+ remappings = [('/tf', 'tf'),
+ ('/tf_static', 'tf_static')]
+
+ # Create our own temporary YAML files that include substitutions
+ param_substitutions = {
+ 'use_sim_time': use_sim_time,
+ 'yaml_filename': map_yaml_file}
+
+ configured_params = ParameterFile(
+ RewrittenYaml(
+ source_file=params_file,
+ root_key=namespace,
+ param_rewrites=param_substitutions,
+ convert_types=True),
+ allow_substs=True)
+
+ stdout_linebuf_envvar = SetEnvironmentVariable(
+ 'RCUTILS_LOGGING_BUFFERED_STREAM', '1')
+
+ declare_namespace_cmd = DeclareLaunchArgument(
+ 'namespace',
+ default_value='',
+ description='Top-level namespace')
+
+ declare_map_yaml_cmd = DeclareLaunchArgument(
+ 'map',
+ description='Full path to map yaml file to load')
+
+ declare_use_sim_time_cmd = DeclareLaunchArgument(
+ 'use_sim_time',
+ default_value='false',
+ description='Use simulation (Gazebo) clock if true')
+
+ declare_params_file_cmd = DeclareLaunchArgument(
+ 'params_file',
+ default_value=os.path.join(bringup_dir, 'config', 'nav2_params.yaml'),
+ description='Full path to the ROS2 parameters file to use for all launched nodes')
+
+ declare_autostart_cmd = DeclareLaunchArgument(
+ 'autostart', default_value='true',
+ description='Automatically startup the nav2 stack')
+
+ declare_use_composition_cmd = DeclareLaunchArgument(
+ 'use_composition', default_value='False',
+ description='Use composed bringup if True')
+
+ declare_container_name_cmd = DeclareLaunchArgument(
+ 'container_name', default_value='nav2_container',
+ description='The name of the container that nodes will load into if using composition')
+
+ declare_use_respawn_cmd = DeclareLaunchArgument(
+ 'use_respawn', default_value='False',
+ description='Whether to respawn if a node crashes. Applied when composition is disabled.')
+
+ declare_log_level_cmd = DeclareLaunchArgument(
+ 'log_level', default_value='info',
+ description='log level')
+
+ load_nodes = GroupAction(
+ condition=IfCondition(PythonExpression(['not ', use_composition])),
+ actions=[
+ Node(
+ package='nav2_map_server',
+ executable='map_server',
+ name='map_server',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings),
+ Node(
+ package='nav2_amcl',
+ executable='amcl',
+ name='amcl',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings),
+ Node(
+ package='nav2_lifecycle_manager',
+ executable='lifecycle_manager',
+ name='lifecycle_manager_localization',
+ output='screen',
+ arguments=['--ros-args', '--log-level', log_level],
+ parameters=[{'use_sim_time': use_sim_time},
+ {'autostart': autostart},
+ {'node_names': lifecycle_nodes}])
+ ]
+ )
+
+ load_composable_nodes = LoadComposableNodes(
+ condition=IfCondition(use_composition),
+ target_container=container_name_full,
+ composable_node_descriptions=[
+ ComposableNode(
+ package='nav2_map_server',
+ plugin='nav2_map_server::MapServer',
+ name='map_server',
+ parameters=[configured_params],
+ remappings=remappings),
+ ComposableNode(
+ package='nav2_amcl',
+ plugin='nav2_amcl::AmclNode',
+ name='amcl',
+ parameters=[configured_params],
+ remappings=remappings),
+ ComposableNode(
+ package='nav2_lifecycle_manager',
+ plugin='nav2_lifecycle_manager::LifecycleManager',
+ name='lifecycle_manager_localization',
+ parameters=[{'use_sim_time': use_sim_time,
+ 'autostart': autostart,
+ 'node_names': lifecycle_nodes}]),
+ ],
+ )
+
+ # Create the launch description and populate
+ ld = LaunchDescription()
+
+ # Set environment variables
+ ld.add_action(stdout_linebuf_envvar)
+
+ # Declare the launch options
+ ld.add_action(declare_namespace_cmd)
+ ld.add_action(declare_map_yaml_cmd)
+ ld.add_action(declare_use_sim_time_cmd)
+ ld.add_action(declare_params_file_cmd)
+ ld.add_action(declare_autostart_cmd)
+ ld.add_action(declare_use_composition_cmd)
+ ld.add_action(declare_container_name_cmd)
+ ld.add_action(declare_use_respawn_cmd)
+ ld.add_action(declare_log_level_cmd)
+
+ # Add the actions to launch all of the localization nodes
+ ld.add_action(load_nodes)
+ ld.add_action(load_composable_nodes)
+
+ return ld
diff --git a/navigation/packages/nav_main/launch/nav_amcl.launch.py b/navigation/packages/nav_main/launch/nav_amcl.launch.py
new file mode 100644
index 00000000..f141d864
--- /dev/null
+++ b/navigation/packages/nav_main/launch/nav_amcl.launch.py
@@ -0,0 +1,73 @@
+import os
+from ament_index_python.packages import get_package_share_directory
+from launch import LaunchDescription
+from launch_ros.actions import Node
+from launch.actions import IncludeLaunchDescription, DeclareLaunchArgument
+from launch.launch_description_sources import PythonLaunchDescriptionSource
+from launch.substitutions import LaunchConfiguration, PathJoinSubstitution
+from launch_ros.substitutions import FindPackageShare
+
+def generate_launch_description():
+ lifecycle_nodes = ['map_server', 'amcl']
+
+ publish_urdf = LaunchConfiguration('publish_tf')
+ map_route = LaunchConfiguration('map')
+
+ declare_publish_tf = DeclareLaunchArgument(
+ 'publish_tf',
+ default_value='true',
+ description='Whether to publish URDF'
+ )
+ declare_map_route = DeclareLaunchArgument(
+ 'map',
+ default_value=os.path.join(get_package_share_directory('nav_main'), 'maps', 'Lab01.yaml'),
+ description='Path to the map file'
+ )
+ nav_main_package = get_package_share_directory('nav_main')
+
+ nav_basics = IncludeLaunchDescription(
+ PythonLaunchDescriptionSource(
+ PathJoinSubstitution(
+ [
+ FindPackageShare("nav_main"),
+ "launch",
+ "nav_basics.launch.py",
+ ]
+ )),
+ launch_arguments={'publish_tf': publish_urdf }.items()
+ )
+
+ map_server = Node(
+ package='nav2_map_server',
+ executable='map_server',
+ name='map_server',
+ output='screen',
+ parameters=[{'yaml_filename': LaunchConfiguration('map'),
+ 'use_sim_time' : False}],
+ )
+ amcl_server = Node(
+ package='nav2_amcl',
+ executable='amcl',
+ name='amcl',
+ output='screen',
+ parameters=[{'use_sim_time' : False}],
+ )
+
+ lifecycle_node = Node(
+ package='nav2_lifecycle_manager',
+ executable='lifecycle_manager',
+ name='lifecycle_manager_localization',
+ output='screen',
+ parameters=[{'use_sim_time': False},
+ {'autostart': True},
+ {'node_names': lifecycle_nodes}])
+
+ return LaunchDescription([
+ declare_publish_tf,
+ nav_basics,
+ declare_map_route,
+ map_server,
+ amcl_server,
+ lifecycle_node
+
+ ])
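
AMCL only starts publishing a useful map->odom transform once it has an initial pose estimate. Besides RViz's "2D Pose Estimate" tool, the estimate can be published programmatically; the sketch below assumes AMCL's default /initialpose topic and a geometry_msgs/PoseWithCovarianceStamped message in the map frame:

    import rclpy
    from rclpy.node import Node
    from geometry_msgs.msg import PoseWithCovarianceStamped


    def publish_initial_pose(x: float = 0.0, y: float = 0.0) -> None:
        """Publish one initial pose estimate for AMCL (topic name assumed: /initialpose)."""
        rclpy.init()
        node = Node("initial_pose_publisher")
        pub = node.create_publisher(PoseWithCovarianceStamped, "/initialpose", 10)

        msg = PoseWithCovarianceStamped()
        msg.header.frame_id = "map"
        msg.header.stamp = node.get_clock().now().to_msg()
        msg.pose.pose.position.x = x
        msg.pose.pose.position.y = y
        msg.pose.pose.orientation.w = 1.0  # identity orientation, facing along +x

        rclpy.spin_once(node, timeout_sec=1.0)  # give AMCL a moment to discover the publisher
        pub.publish(msg)
        node.get_logger().info(f"published initial pose at ({x}, {y})")
        node.destroy_node()
        rclpy.shutdown()


    if __name__ == "__main__":
        publish_initial_pose()
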
diff --git a/navigation/packages/nav_main/launch/nav_basics.launch.py b/navigation/packages/nav_main/launch/nav_basics.launch.py
new file mode 100644
index 00000000..c10efcbc
--- /dev/null
+++ b/navigation/packages/nav_main/launch/nav_basics.launch.py
@@ -0,0 +1,83 @@
+import os
+from ament_index_python.packages import get_package_share_directory
+from launch import LaunchDescription
+from launch_ros.actions import Node
+from launch.actions import IncludeLaunchDescription, DeclareLaunchArgument, GroupAction
+from launch.launch_description_sources import PythonLaunchDescriptionSource
+from launch.substitutions import LaunchConfiguration, PathJoinSubstitution
+from launch_ros.substitutions import FindPackageShare
+from launch.conditions import IfCondition
+
+def generate_launch_description():
+ publish_urdf = LaunchConfiguration('publish_tf')
+
+ # Declare the launch argument
+ declare_publish_tf = DeclareLaunchArgument(
+ 'publish_tf',
+ default_value='true', # LaunchConfiguration values are strings, so use 'true'/'false'
+ description='Whether to publish URDF'
+ )
+
+ dashgo_driver = IncludeLaunchDescription(
+ PythonLaunchDescriptionSource(
+ PathJoinSubstitution(
+ [
+ FindPackageShare("dashgo_driver"),
+ "launch",
+ "dashgo_driver.launch.py",
+ ]
+ )
+ ))
+ ekf_launch = IncludeLaunchDescription(
+ PythonLaunchDescriptionSource(
+ PathJoinSubstitution(
+ [
+ FindPackageShare("dashgo_driver"),
+ "launch",
+ "ekf.launch.py",
+ ]
+ )
+ ))
+
+ robot_description_launch = IncludeLaunchDescription(
+ PythonLaunchDescriptionSource(
+ PathJoinSubstitution(
+ [
+ FindPackageShare("urdf_launch"),
+ "launch",
+ "description.launch.py",
+ ]
+ )
+ ),
+ launch_arguments={
+ 'urdf_package': 'frida_description',
+ 'urdf_package_path': PathJoinSubstitution(['urdf','FRIDA_Real.urdf.xacro'])
+ }.items(),
+ condition=IfCondition(publish_urdf), # Condition to include this launch file only if publish_tf is true
+ )
+
+ laser_launch = IncludeLaunchDescription(
+ PythonLaunchDescriptionSource(
+ PathJoinSubstitution(
+ [
+ FindPackageShare("nav_main"),
+ "launch",
+ "rplidar_fixed.launch.py",
+ ]
+ )
+ ))
+
+ joint_state = Node(
+ package='joint_state_publisher',
+ executable='joint_state_publisher',
+ condition=IfCondition(publish_urdf), # Only launch joint_state if publish_tf is true
+ )
+
+ return LaunchDescription([
+ declare_publish_tf, # Declare launch argument
+ dashgo_driver,
+ ekf_launch,
+ robot_description_launch,
+ joint_state,
+ laser_launch
+ ])
diff --git a/navigation/packages/nav_main/launch/nav_mapping.launch.py b/navigation/packages/nav_main/launch/nav_mapping.launch.py
new file mode 100644
index 00000000..e3583670
--- /dev/null
+++ b/navigation/packages/nav_main/launch/nav_mapping.launch.py
@@ -0,0 +1,53 @@
+import os
+from ament_index_python.packages import get_package_share_directory
+from launch import LaunchDescription
+from launch_ros.actions import Node
+from launch.actions import IncludeLaunchDescription, DeclareLaunchArgument
+from launch.launch_description_sources import PythonLaunchDescriptionSource
+from launch.substitutions import LaunchConfiguration, PathJoinSubstitution
+from launch_ros.substitutions import FindPackageShare
+
+def generate_launch_description():
+ publish_urdf = LaunchConfiguration('publish_tf')
+
+ # Declare the launch argument
+ declare_publish_tf = DeclareLaunchArgument(
+ 'publish_tf',
+ default_value='true', # LaunchConfiguration values are strings, so use 'true'/'false'
+ description='Whether to publish URDF'
+ )
+
+ nav_main_package = get_package_share_directory('nav_main')
+ params_file = os.path.join(nav_main_package, 'config', 'mapper_params_online_async.yaml')
+
+ nav_basics = IncludeLaunchDescription(
+ PythonLaunchDescriptionSource(
+ PathJoinSubstitution(
+ [
+ FindPackageShare("nav_main"),
+ "launch",
+ "nav_basics.launch.py",
+ ]
+ )),
+ launch_arguments={'publish_tf': publish_urdf }.items()
+ )
+
+ slam_toolbox = IncludeLaunchDescription(
+ PythonLaunchDescriptionSource(
+ PathJoinSubstitution(
+ [
+ FindPackageShare("slam_toolbox"),
+ "launch",
+ "online_async_launch.py",
+ ]
+ )),
+ launch_arguments={'params_file': params_file,
+ 'use_sim_time': 'false'}.items()
+ )
+
+ return LaunchDescription([
+ declare_publish_tf,
+ nav_basics,
+ slam_toolbox
+
+ ])
diff --git a/navigation/packages/nav_main/launch/navigation_launch.py b/navigation/packages/nav_main/launch/navigation_launch.py
new file mode 100644
index 00000000..549eba64
--- /dev/null
+++ b/navigation/packages/nav_main/launch/navigation_launch.py
@@ -0,0 +1,272 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from ament_index_python.packages import get_package_share_directory
+
+from launch import LaunchDescription
+from launch.actions import DeclareLaunchArgument, GroupAction, SetEnvironmentVariable
+from launch.conditions import IfCondition
+from launch.substitutions import LaunchConfiguration, PythonExpression
+from launch_ros.actions import LoadComposableNodes
+from launch_ros.actions import Node
+from launch_ros.descriptions import ComposableNode, ParameterFile
+from nav2_common.launch import RewrittenYaml
+
+
+def generate_launch_description():
+ # Get the launch directory
+ bringup_dir = get_package_share_directory('nav_main')
+
+ namespace = LaunchConfiguration('namespace')
+ use_sim_time = LaunchConfiguration('use_sim_time')
+ autostart = LaunchConfiguration('autostart')
+ params_file = LaunchConfiguration('params_file')
+ use_composition = LaunchConfiguration('use_composition')
+ container_name = LaunchConfiguration('container_name')
+ container_name_full = (namespace, '/', container_name)
+ use_respawn = LaunchConfiguration('use_respawn')
+ log_level = LaunchConfiguration('log_level')
+
+ lifecycle_nodes = ['controller_server',
+ 'smoother_server',
+ 'planner_server',
+ 'behavior_server',
+ 'bt_navigator',
+ 'waypoint_follower',
+ 'velocity_smoother']
+
+ # Map fully qualified names to relative ones so the node's namespace can be prepended.
+ # In case of the transforms (tf), currently, there doesn't seem to be a better alternative
+ # https://github.com/ros/geometry2/issues/32
+ # https://github.com/ros/robot_state_publisher/pull/30
+ # TODO(orduno) Substitute with `PushNodeRemapping`
+ # https://github.com/ros2/launch_ros/issues/56
+ remappings = [('/tf', 'tf'),
+ ('/tf_static', 'tf_static')]
+
+ # Create our own temporary YAML files that include substitutions
+ param_substitutions = {
+ 'use_sim_time': use_sim_time,
+ 'autostart': autostart}
+
+ configured_params = ParameterFile(
+ RewrittenYaml(
+ source_file=params_file,
+ root_key=namespace,
+ param_rewrites=param_substitutions,
+ convert_types=True),
+ allow_substs=True)
+
+ stdout_linebuf_envvar = SetEnvironmentVariable(
+ 'RCUTILS_LOGGING_BUFFERED_STREAM', '1')
+
+ declare_namespace_cmd = DeclareLaunchArgument(
+ 'namespace',
+ default_value='',
+ description='Top-level namespace')
+
+ declare_use_sim_time_cmd = DeclareLaunchArgument(
+ 'use_sim_time',
+ default_value='false',
+ description='Use simulation (Gazebo) clock if true')
+
+ declare_params_file_cmd = DeclareLaunchArgument(
+ 'params_file',
+ default_value=os.path.join(bringup_dir, 'config', 'nav2_params.yaml'),
+ description='Full path to the ROS2 parameters file to use for all launched nodes')
+
+ declare_autostart_cmd = DeclareLaunchArgument(
+ 'autostart', default_value='true',
+ description='Automatically startup the nav2 stack')
+
+ declare_use_composition_cmd = DeclareLaunchArgument(
+ 'use_composition', default_value='False',
+ description='Use composed bringup if True')
+
+ declare_container_name_cmd = DeclareLaunchArgument(
+ 'container_name', default_value='nav2_container',
+ description='The name of the container that nodes will load into if using composition')
+
+ declare_use_respawn_cmd = DeclareLaunchArgument(
+ 'use_respawn', default_value='False',
+ description='Whether to respawn if a node crashes. Applied when composition is disabled.')
+
+ declare_log_level_cmd = DeclareLaunchArgument(
+ 'log_level', default_value='info',
+ description='log level')
+
+ load_nodes = GroupAction(
+ condition=IfCondition(PythonExpression(['not ', use_composition])),
+ actions=[
+ Node(
+ package='nav2_controller',
+ executable='controller_server',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings + [('cmd_vel', 'cmd_vel_nav')]),
+ Node(
+ package='nav2_smoother',
+ executable='smoother_server',
+ name='smoother_server',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings),
+ Node(
+ package='nav2_planner',
+ executable='planner_server',
+ name='planner_server',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings),
+ Node(
+ package='nav2_behaviors',
+ executable='behavior_server',
+ name='behavior_server',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings),
+ Node(
+ package='nav2_bt_navigator',
+ executable='bt_navigator',
+ name='bt_navigator',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings),
+ Node(
+ package='nav2_waypoint_follower',
+ executable='waypoint_follower',
+ name='waypoint_follower',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings),
+ Node(
+ package='nav2_velocity_smoother',
+ executable='velocity_smoother',
+ name='velocity_smoother',
+ output='screen',
+ respawn=use_respawn,
+ respawn_delay=2.0,
+ parameters=[configured_params],
+ arguments=['--ros-args', '--log-level', log_level],
+ remappings=remappings +
+ [('cmd_vel', 'cmd_vel_nav'), ('cmd_vel_smoothed', 'cmd_vel')]),
+ Node(
+ package='nav2_lifecycle_manager',
+ executable='lifecycle_manager',
+ name='lifecycle_manager_navigation',
+ output='screen',
+ arguments=['--ros-args', '--log-level', log_level],
+ parameters=[{'use_sim_time': use_sim_time},
+ {'autostart': autostart},
+ {'node_names': lifecycle_nodes}]),
+ ]
+ )
+
+ load_composable_nodes = LoadComposableNodes(
+ condition=IfCondition(use_composition),
+ target_container=container_name_full,
+ composable_node_descriptions=[
+ ComposableNode(
+ package='nav2_controller',
+ plugin='nav2_controller::ControllerServer',
+ name='controller_server',
+ parameters=[configured_params],
+ remappings=remappings + [('cmd_vel', 'cmd_vel_nav')]),
+ ComposableNode(
+ package='nav2_smoother',
+ plugin='nav2_smoother::SmootherServer',
+ name='smoother_server',
+ parameters=[configured_params],
+ remappings=remappings),
+ ComposableNode(
+ package='nav2_planner',
+ plugin='nav2_planner::PlannerServer',
+ name='planner_server',
+ parameters=[configured_params],
+ remappings=remappings),
+ ComposableNode(
+ package='nav2_behaviors',
+ plugin='behavior_server::BehaviorServer',
+ name='behavior_server',
+ parameters=[configured_params],
+ remappings=remappings),
+ ComposableNode(
+ package='nav2_bt_navigator',
+ plugin='nav2_bt_navigator::BtNavigator',
+ name='bt_navigator',
+ parameters=[configured_params],
+ remappings=remappings),
+ ComposableNode(
+ package='nav2_waypoint_follower',
+ plugin='nav2_waypoint_follower::WaypointFollower',
+ name='waypoint_follower',
+ parameters=[configured_params],
+ remappings=remappings),
+ ComposableNode(
+ package='nav2_velocity_smoother',
+ plugin='nav2_velocity_smoother::VelocitySmoother',
+ name='velocity_smoother',
+ parameters=[configured_params],
+ remappings=remappings +
+ [('cmd_vel', 'cmd_vel_nav'), ('cmd_vel_smoothed', 'cmd_vel')]),
+ ComposableNode(
+ package='nav2_lifecycle_manager',
+ plugin='nav2_lifecycle_manager::LifecycleManager',
+ name='lifecycle_manager_navigation',
+ parameters=[{'use_sim_time': use_sim_time,
+ 'autostart': autostart,
+ 'node_names': lifecycle_nodes}]),
+ ],
+ )
+
+ # Create the launch description and populate
+ ld = LaunchDescription()
+
+ # Set environment variables
+ ld.add_action(stdout_linebuf_envvar)
+
+ # Declare the launch options
+ ld.add_action(declare_namespace_cmd)
+ ld.add_action(declare_use_sim_time_cmd)
+ ld.add_action(declare_params_file_cmd)
+ ld.add_action(declare_autostart_cmd)
+ ld.add_action(declare_use_composition_cmd)
+ ld.add_action(declare_container_name_cmd)
+ ld.add_action(declare_use_respawn_cmd)
+ ld.add_action(declare_log_level_cmd)
+ # Add the actions to launch all of the navigation nodes
+ ld.add_action(load_nodes)
+ ld.add_action(load_composable_nodes)
+
+ return ld
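
Once these servers are active, goals normally go through the bt_navigator's NavigateToPose action rather than raw /cmd_vel. A minimal client sketch, assuming the default navigate_to_pose action name and the nav2_msgs/action/NavigateToPose interface:

    import rclpy
    from rclpy.action import ActionClient
    from rclpy.node import Node
    from geometry_msgs.msg import PoseStamped
    from nav2_msgs.action import NavigateToPose


    def send_goal(x: float, y: float) -> None:
        """Send a single NavigateToPose goal and wait for the result."""
        rclpy.init()
        node = Node("nav_goal_client")
        client = ActionClient(node, NavigateToPose, "navigate_to_pose")
        client.wait_for_server()

        goal = NavigateToPose.Goal()
        goal.pose = PoseStamped()
        goal.pose.header.frame_id = "map"
        goal.pose.header.stamp = node.get_clock().now().to_msg()
        goal.pose.pose.position.x = x
        goal.pose.pose.position.y = y
        goal.pose.pose.orientation.w = 1.0

        send_future = client.send_goal_async(goal)
        rclpy.spin_until_future_complete(node, send_future)
        result_future = send_future.result().get_result_async()
        rclpy.spin_until_future_complete(node, result_future)
        node.get_logger().info("navigation finished")
        node.destroy_node()
        rclpy.shutdown()


    if __name__ == "__main__":
        send_goal(1.0, 0.0)
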
diff --git a/navigation/packages/nav_main/launch/rplidar_fixed.launch.py b/navigation/packages/nav_main/launch/rplidar_fixed.launch.py
index 75d10aea..17607c3e 100644
--- a/navigation/packages/nav_main/launch/rplidar_fixed.launch.py
+++ b/navigation/packages/nav_main/launch/rplidar_fixed.launch.py
@@ -1,43 +1,6 @@
-# from launch_ros.substitutions import FindPackageShare
-# from launch import LaunchDescription
-# from launch.actions import IncludeLaunchDescription, Node
-# from launch.launch_description_sources import PythonLaunchDescriptionSource
-# from launch.substitutions import PathJoinSubstitution, TextSubstitution
-
-# def generate_launch_description():
-
-# return LaunchDescription([
-# # IncludeLaunchDescription(
-# # PythonLaunchDescriptionSource([
-# # PathJoinSubstitution([
-# # FindPackageShare('sllidar_ros2'),
-# # 'launch',
-# # 'sllidar_a1.launch.py'
-# # ])
-# # ]),
-# # launch_arguments={
-# # 'serial_port': '/dev/ttyUSB0',
-# # 'frame_id': 'laser',
-# # 'inverted':'true',
-# # }.items()
-# # ),
-# Node(
-# package='tf2_ros',
-# executable='static_transform_publisher',
-# name='laser_to_base_link',
-# parameters=[
-# "0.1", "0.0", "0.2", "-1.57", "0.0", "0.0", "/base_link", "/laser", "40"
-# ],
-# ),
-# ])
-
from launch import LaunchDescription
from launch_ros.actions import Node
-from launch.actions import IncludeLaunchDescription
-from launch.launch_description_sources import PythonLaunchDescriptionSource
-from launch.substitutions import PathJoinSubstitution
-from launch_ros.substitutions import FindPackageShare
def generate_launch_description():
return LaunchDescription([
@@ -61,6 +24,4 @@ def generate_launch_description():
('/scan', '/scan_input')]
),
- ])
-
-#
\ No newline at end of file
+ ])
\ No newline at end of file
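
The commented-out block deleted above passed the laser transform through `parameters`, which is not how tf2_ros's static_transform_publisher is configured; the transform goes in as command-line arguments. If a static laser transform were still needed (the URDF included by nav_basics.launch.py should normally provide it), a corrected sketch would look like this, reusing the offsets and frame names from the removed comment (verify the positional argument order, x y z yaw pitch roll parent child, against your tf2_ros version):

    from launch import LaunchDescription
    from launch_ros.actions import Node


    def generate_launch_description():
        # Same transform as the removed comment, but passed as arguments instead of parameters.
        laser_tf = Node(
            package="tf2_ros",
            executable="static_transform_publisher",
            name="laser_to_base_link",
            arguments=["0.1", "0.0", "0.2", "-1.57", "0.0", "0.0", "base_link", "laser"],
        )
        return LaunchDescription([laser_tf])
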
diff --git a/navigation/packages/nav_main/maps/Lab01.pgm b/navigation/packages/nav_main/maps/Lab01.pgm
new file mode 100644
index 00000000..1e501d99
Binary files /dev/null and b/navigation/packages/nav_main/maps/Lab01.pgm differ
diff --git a/navigation/packages/nav_main/maps/Lab01.yaml b/navigation/packages/nav_main/maps/Lab01.yaml
new file mode 100644
index 00000000..d471e3aa
--- /dev/null
+++ b/navigation/packages/nav_main/maps/Lab01.yaml
@@ -0,0 +1,7 @@
+image: Lab01.pgm
+mode: trinary
+resolution: 0.05
+origin: [-3.99, -10.4, 0]
+negate: 0
+occupied_thresh: 0.65
+free_thresh: 0.25
\ No newline at end of file
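
Lab01.yaml follows the standard map_server format: resolution is meters per pixel and origin is the world pose of the image's lower-left corner. A small illustrative conversion between pixel and world coordinates under that convention (the image height is a placeholder; read the real value from Lab01.pgm):

    # Metadata from Lab01.yaml (map_server convention: origin = lower-left corner, resolution in m/px).
    RESOLUTION = 0.05
    ORIGIN_X, ORIGIN_Y = -3.99, -10.4
    IMAGE_HEIGHT_PX = 400  # placeholder value


    def pixel_to_world(col: int, row: int) -> tuple:
        """Convert an image pixel (col, row), with row 0 at the top, to world (x, y) in meters."""
        x = ORIGIN_X + (col + 0.5) * RESOLUTION
        y = ORIGIN_Y + (IMAGE_HEIGHT_PX - row - 0.5) * RESOLUTION
        return x, y


    if __name__ == "__main__":
        # Center of the lower-left pixel lands just inside the map origin.
        print(pixel_to_world(0, IMAGE_HEIGHT_PX - 1))
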
diff --git a/navigation/packages/nav_main/package.xml b/navigation/packages/nav_main/package.xml
index c0f85afd..e9495fa6 100644
--- a/navigation/packages/nav_main/package.xml
+++ b/navigation/packages/nav_main/package.xml
@@ -15,7 +15,10 @@
sensor_msgs
frida_interfaces
frida_constants
-
+ p9n_node
+ rclcpp_components
+ navigation2
+ nav2_bringup
action_msgs
diff --git a/robot_description/frida_description/package.xml b/robot_description/frida_description/package.xml
index ec2b75a6..879d5a3e 100644
--- a/robot_description/frida_description/package.xml
+++ b/robot_description/frida_description/package.xml
@@ -14,7 +14,6 @@
rclpy
urdf_launch
xarm_description
- zed_msgs
ament_lint_auto
ament_lint_common
diff --git a/robot_description/frida_description/urdf/FRIDA_Real.urdf.xacro b/robot_description/frida_description/urdf/FRIDA_Real.urdf.xacro
index a7cd918c..a26c558e 100644
--- a/robot_description/frida_description/urdf/FRIDA_Real.urdf.xacro
+++ b/robot_description/frida_description/urdf/FRIDA_Real.urdf.xacro
@@ -41,15 +41,6 @@
-  [9 removed XML lines; markup not preserved in this extract]
@@ -77,10 +68,9 @@
-  [4 removed XML lines; markup not preserved in this extract]
+  [4 replacement XML lines; markup not preserved in this extract]
\ No newline at end of file
diff --git a/robot_description/frida_description/urdf/zed_macro.urdf.xacro b/robot_description/frida_description/urdf/zed_macro.urdf.xacro
deleted file mode 100644
index d2bb2310..00000000
--- a/robot_description/frida_description/urdf/zed_macro.urdf.xacro
+++ /dev/null
@@ -1,265 +0,0 @@
-  [entire contents of zed_macro.urdf.xacro (265 XML lines) deleted; markup not preserved in this extract]
diff --git a/run.sh b/run.sh
index da144fe7..4f64467b 100755
--- a/run.sh
+++ b/run.sh
@@ -28,6 +28,10 @@ case $AREA in
echo "Running manipulation"
area="manipulation"
;;
+ navigation)
+ echo "Running manipulation"
+ area="navigation"
+ ;;
*)
echo "Invalid service name provided. Valid args are: vision, hri, etc"
exit 1
diff --git a/task_manager/scripts/subtask_managers/hri_tasks.py b/task_manager/scripts/subtask_managers/hri_tasks.py
index 31f68687..fa1be0dd 100755
--- a/task_manager/scripts/subtask_managers/hri_tasks.py
+++ b/task_manager/scripts/subtask_managers/hri_tasks.py
@@ -203,6 +203,41 @@ def ask(self, question: str) -> str:
rclpy.spin_until_future_complete(self.node, future)
return future.result().answer
+ def add_item(self, document: list, item_id: list, collection: str, metadata: list) -> str:
+ """
+ Adds new items to the ChromaDB collection.
+
+ Args:
+ document (list): List of documents to be added.
+ item_id (list): List of item IDs corresponding to each document.
+ collection (str): The collection to add the items to.
+ metadata (list): List of metadata corresponding to each document.
+
+ Returns:
+ str: A message indicating the success or failure of the operation.
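+        Example (hypothetical values; documents, IDs and metadata are parallel
+        lists, with each metadata entry serialized as a JSON string):
+            result = self.add_item(
+                document=["the kitchen table is next to the fridge"],
+                item_id=["fact_001"],
+                collection="household_facts",
+                metadata=['{"source": "operator"}'],
+            )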
+ """
+ try:
+ # Prepare the request with the necessary arguments
+ request = AddItem.Request(
+ document=document, # List of documents
+ id=item_id, # List of item IDs
+ collection=collection, # The collection to add the items to
+ metadata=metadata, # Metadata as a JSON
+ )
+
+ # Make the service call
+ future = self.add_item_client.call_async(request)
+ rclpy.spin_until_future_complete(self.node, future)
+
+ # Check if the operation was successful
+ if future.result().success:
+ return "Items added successfully"
+ else:
+ return f"Failed to add items: {future.result().message}"
+
+ except Exception as e:
+ return f"Error: {str(e)}"
+
def command_interpreter(self, text: str) -> CommandInterpreter.Response:
request = CommandInterpreter.Request(text=text)
future = self.command_interpreter_client.call_async(request)