Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Keep the build context minimal: VCS data, build artifacts, and assets the
# image never needs.
.git
build
install
log
# Exclude docker/ contents EXCEPT foxglove_utility: the Dockerfile runs
# `COPY docker/foxglove_utility/twist_relay.py ...` (and the goal relay), so a
# blanket "docker" exclusion would remove those files from the build context
# and make the COPY steps fail. `!` negations re-include the needed subtree.
docker/*
!docker/foxglove_utility
!docker/foxglove_utility/**
map
base_station
chrony_conf
desktop_buttons
img
*.md
LICENSE
81 changes: 81 additions & 0 deletions docker/.env
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
# Hardware Configuration Environment Variables
# Customize for your hardware setup

# ============================================
# Docker Runtime
# ============================================
# Set to nvidia for GPU support (requires nvidia-container-toolkit)
#DOCKER_RUNTIME=nvidia

# Image tag (default: latest)
IMAGE_TAG=latest

# ============================================
# ROS Configuration
# ============================================
# ROS domain ID for multi-robot setups
ROS_DOMAIN_ID=42

# ============================================
# Mid-360 Lidar Configuration
# ============================================
# Network interface connected to the lidar (e.g., eth0, enp0s3)
# Find with: ip addr show
LIDAR_INTERFACE=eth0

# Processing computer IP address on the lidar subnet
# Must be on the same subnet as the lidar (e.g., 192.168.1.5)
LIDAR_COMPUTER_IP=192.168.1.5

# Gateway IP address for the lidar subnet
LIDAR_GATEWAY=192.168.1.1

# Full IP address of your Mid-360 lidar
# Common pattern: 192.168.1.1XX where XX = last 2 digits of serial
LIDAR_IP=192.168.1.116

# ============================================
# Motor Controller Configuration
# ============================================
# Serial device for motor controller
# Check with: ls /dev/ttyACM* or ls /dev/ttyUSB*
MOTOR_SERIAL_DEVICE=/dev/ttyACM0

# ============================================
# Network Communication
# ============================================
# Enable WiFi buffer optimization for wireless data transmission
ENABLE_WIFI_BUFFER=false

# ============================================
# Navigation Options
# ============================================
# Enable RViz visualization (set to true for debugging)
USE_RVIZ=false

# Map path for localization mode (leave empty for SLAM/mapping mode)
# Set to file prefix (no .pcd extension), e.g., /ros2_ws/maps/warehouse
MAP_PATH=

# ============================================
# ROS Bridge Server (TCP API for programmatic control)
# ============================================
# Bind address (0.0.0.0 = all interfaces)
#ROS_BRIDGE_HOST=0.0.0.0

# TCP port for the bridge API
#ROS_BRIDGE_PORT=9090

# Default autonomy speed (0.0 to 1.0)
#ROS_BRIDGE_AUTONOMY_SPEED=1.0

# ============================================
# Device Group IDs
# ============================================
# Group ID for /dev/input devices (joystick)
# Find with: getent group input | cut -d: -f3
INPUT_GID=995

# Group ID for serial devices
# Find with: getent group dialout | cut -d: -f3
DIALOUT_GID=20
288 changes: 288 additions & 0 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,288 @@
# syntax=docker/dockerfile:1
# =============================================================================
# Navigation Autonomy Stack Docker Image
# =============================================================================
#
# Multi-stage build for ROS 2 navigation with SLAM support.
# Includes arise_slam and FASTLIO2.
#
# Build:
# ./docker/build.sh # Build for ROS 2 Jazzy (default)
# ./docker/build.sh --humble # Build for ROS 2 Humble
#
# NOTE: this Dockerfile requires BuildKit — it uses `RUN --mount`, a RUN
# here-document, and the auto-populated TARGETARCH build arg. The
# `# syntax=docker/dockerfile:1` directive above pins a frontend that
# supports all of these regardless of the local Docker version.
#
# =============================================================================

# Global build args: available to all FROM lines below. TARGETARCH is filled
# in automatically by BuildKit (amd64/arm64); ROS_DISTRO selects the distro.
ARG ROS_DISTRO=jazzy
ARG TARGETARCH

# Platform-specific base images. Only the stage actually referenced by
# base-${TARGETARCH} is pulled/built under BuildKit.
FROM osrf/ros:${ROS_DISTRO}-desktop-full AS base-amd64
FROM ros:${ROS_DISTRO}-ros-base AS base-arm64

# =============================================================================
# STAGE 1: Build
# =============================================================================
FROM base-${TARGETARCH} AS builder

ARG ROS_DISTRO
ENV DEBIAN_FRONTEND=noninteractive
ENV ROS_DISTRO=${ROS_DISTRO}
ENV WORKSPACE=/ros2_ws

# Toolchain and third-party build dependencies (alphabetized for diffability):
# compilers/CMake/git, colcon, the math/vision libraries the SLAM packages
# link against (ATLAS, Eigen, gflags, glog, PCL, SuiteSparse), and the ROS
# cv_bridge / pcl_ros bindings. apt lists are removed in the same layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    cmake \
    git \
    libatlas-base-dev \
    libeigen3-dev \
    libgflags-dev \
    libgoogle-glog-dev \
    libpcl-dev \
    libsuitesparse-dev \
    python3-colcon-common-extensions \
    ros-${ROS_DISTRO}-cv-bridge \
    ros-${ROS_DISTRO}-pcl-ros \
    && rm -rf /var/lib/apt/lists/*

# On arm64, ros-base doesn't include rviz2; install separately for building rviz plugins
ARG TARGETARCH
RUN if [ "${TARGETARCH}" = "arm64" ]; then \
apt-get update && apt-get install -y --no-install-recommends \
ros-${ROS_DISTRO}-rviz2 \
&& rm -rf /var/lib/apt/lists/*; \
fi

# On arm64, build or-tools from source (pre-built binaries are x86_64 only).
# The result is installed to /opt/or-tools and later swapped into the
# tare_planner vendored copy (see the "replace x86_64 or-tools" step below).
# -DBUILD_DEPS=ON has or-tools build its own third-party dependencies;
# samples/examples/flatzinc/SCIP/COIN-OR are disabled to shorten the build.
# The -j expression caps parallelism at 4 — presumably to bound memory use on
# small arm64 boards (TODO confirm).
ENV OR_TOOLS_VERSION=9.8
RUN if [ "${TARGETARCH}" = "arm64" ]; then \
echo "Building or-tools v${OR_TOOLS_VERSION} from source for arm64..." && \
apt-get update && apt-get install -y --no-install-recommends \
lsb-release \
wget \
&& rm -rf /var/lib/apt/lists/* && \
cd /tmp && \
wget -q https://github.com/google/or-tools/archive/refs/tags/v${OR_TOOLS_VERSION}.tar.gz && \
tar xzf v${OR_TOOLS_VERSION}.tar.gz && \
cd or-tools-${OR_TOOLS_VERSION} && \
cmake -S . -B build \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_DEPS=ON \
-DBUILD_SAMPLES=OFF \
-DBUILD_EXAMPLES=OFF \
-DBUILD_FLATZINC=OFF \
-DUSE_SCIP=OFF \
-DUSE_COINOR=OFF && \
cmake --build build --config Release -j$(($(nproc) > 4 ? 4 : $(nproc))) && \
cmake --install build --prefix /opt/or-tools && \
rm -rf /tmp/or-tools-${OR_TOOLS_VERSION} /tmp/v${OR_TOOLS_VERSION}.tar.gz; \
fi

# Create workspace and copy source
RUN mkdir -p ${WORKSPACE}/src
COPY src ${WORKSPACE}/src

# On arm64, replace x86_64 or-tools with arm64 build: the tare_planner tree
# vendors x86_64 .so/.a libraries and headers, which are deleted and replaced
# with the /opt/or-tools artifacts produced above. The step is a no-op on
# amd64 (or if either directory is missing). NOTE(review): the ldconfig here
# refreshes the loader cache but the copied libs live under the workspace, not
# a standard library path — likely harmless; confirm it is actually needed.
RUN if [ "${TARGETARCH}" = "arm64" ] && [ -d "/opt/or-tools" ]; then \
echo "Replacing x86_64 or-tools with arm64 build..." && \
OR_TOOLS_DIR=${WORKSPACE}/src/exploration_planner/tare_planner/or-tools && \
if [ -d "${OR_TOOLS_DIR}" ]; then \
rm -rf ${OR_TOOLS_DIR}/lib/*.so* ${OR_TOOLS_DIR}/lib/*.a && \
cp -r /opt/or-tools/lib/* ${OR_TOOLS_DIR}/lib/ && \
rm -rf ${OR_TOOLS_DIR}/include && \
cp -r /opt/or-tools/include ${OR_TOOLS_DIR}/ && \
ldconfig; \
fi; \
fi

# Compatibility fix: In Humble, cv_bridge uses .h extension; Jazzy uses .hpp.
# A cv_bridge.hpp symlink is created next to the Humble header so source that
# includes <cv_bridge/cv_bridge.hpp> compiles on both distros. The find is
# best-effort (errors suppressed, first match wins).
RUN if [ "${ROS_DISTRO}" = "humble" ]; then \
CV_BRIDGE_DIR=$(find /opt/ros/humble/include -name "cv_bridge.h" -printf "%h\n" 2>/dev/null | head -1) && \
if [ -n "$CV_BRIDGE_DIR" ]; then \
ln -sf "$CV_BRIDGE_DIR/cv_bridge.h" "$CV_BRIDGE_DIR/cv_bridge.hpp"; \
fi; \
fi

# Build Livox-SDK2 (lidar vendor SDK) and install it to the default prefix
# (/usr/local); the build tree is removed in the same layer. The installed
# /usr/local/lib + include are copied into the runtime stage later.
RUN cd ${WORKSPACE}/src/utilities/livox_ros_driver2/Livox-SDK2 && \
mkdir -p build && cd build && \
cmake .. && make -j$(nproc) && make install && ldconfig && \
rm -rf ${WORKSPACE}/src/utilities/livox_ros_driver2/Livox-SDK2/build

# Build Sophus (Lie-group library), tests disabled.
RUN cd ${WORKSPACE}/src/slam/dependency/Sophus && \
mkdir -p build && cd build && \
cmake .. -DBUILD_TESTS=OFF && make -j$(nproc) && make install && \
rm -rf ${WORKSPACE}/src/slam/dependency/Sophus/build

# Build Ceres Solver (nonlinear least squares, used by the SLAM packages).
RUN cd ${WORKSPACE}/src/slam/dependency/ceres-solver && \
mkdir -p build && cd build && \
cmake .. && make -j$(nproc) && make install && \
rm -rf ${WORKSPACE}/src/slam/dependency/ceres-solver/build

# Build GTSAM. GTSAM_USE_SYSTEM_EIGEN=ON makes it link the apt-installed
# Eigen (avoiding a second, possibly mismatched vendored copy);
# GTSAM_BUILD_WITH_MARCH_NATIVE=OFF keeps the binaries portable across CPUs
# rather than tuned to the build host.
RUN cd ${WORKSPACE}/src/slam/dependency/gtsam && \
mkdir -p build && cd build && \
cmake .. -DGTSAM_USE_SYSTEM_EIGEN=ON -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF && \
make -j$(nproc) && make install && ldconfig && \
rm -rf ${WORKSPACE}/src/slam/dependency/gtsam/build

# Build ROS workspace with colcon (Release). Run through bash so the ROS
# setup.bash can be sourced first; colcon writes install/ consumed by the
# runtime stage.
RUN /bin/bash -c "source /opt/ros/${ROS_DISTRO}/setup.bash && \
cd ${WORKSPACE} && \
colcon build --cmake-args -DCMAKE_BUILD_TYPE=Release"

# =============================================================================
# STAGE 2: Runtime
# =============================================================================
ARG ROS_DISTRO
ARG TARGETARCH
FROM base-${TARGETARCH} AS runtime

ARG ROS_DISTRO
# Keep apt noninteractive during the image build only. Declaring this as ARG
# (instead of ENV) means it is visible to RUN steps in this stage but is NOT
# baked into the environment of containers started from the final image —
# ENV DEBIAN_FRONTEND is a well-known Dockerfile anti-pattern.
ARG DEBIAN_FRONTEND=noninteractive
ENV ROS_DISTRO=${ROS_DISTRO}
ENV WORKSPACE=/ros2_ws
ENV RMW_IMPLEMENTATION=rmw_fastrtps_cpp

# Install runtime dependencies, grouped as: ROS runtime packages (PCL/cv
# bridges, rviz2, joy, FastRTPS RMW, foxglove bridge), shared libraries the
# builder-stage artifacts link against (PCL, glog, gflags, ATLAS, Eigen,
# SuiteSparse), X11/GL libraries for rviz, and network/USB/joystick
# diagnostics tools. apt lists are removed in the same layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
ros-${ROS_DISTRO}-pcl-ros \
ros-${ROS_DISTRO}-cv-bridge \
ros-${ROS_DISTRO}-rviz2 \
ros-${ROS_DISTRO}-joy \
ros-${ROS_DISTRO}-rmw-fastrtps-cpp \
ros-${ROS_DISTRO}-foxglove-bridge \
libpcl-dev \
libgoogle-glog-dev \
libgflags-dev \
libatlas-base-dev \
libeigen3-dev \
libsuitesparse-dev \
libx11-6 \
libxext6 \
libxrender1 \
libgl1 \
libglib2.0-0 \
iputils-ping \
net-tools \
iproute2 \
usbutils \
joystick \
&& rm -rf /var/lib/apt/lists/*

# Copy installed libraries from builder (Livox-SDK2, Sophus, Ceres, GTSAM)
# — these were `make install`ed to /usr/local in stage 1. ldconfig refreshes
# the dynamic-loader cache so the copied .so files are found at runtime.
COPY --from=builder /usr/local/lib /usr/local/lib
COPY --from=builder /usr/local/include /usr/local/include
RUN ldconfig

# Copy built ROS workspace (colcon install/ tree only — source is not shipped)
COPY --from=builder ${WORKSPACE}/install ${WORKSPACE}/install

# Copy rviz config files from source; launch files reference them by their
# original src/ paths, so the directory layout is preserved.
COPY --from=builder ${WORKSPACE}/src/base_autonomy/vehicle_simulator/rviz ${WORKSPACE}/src/base_autonomy/vehicle_simulator/rviz
COPY --from=builder ${WORKSPACE}/src/route_planner/far_planner/rviz ${WORKSPACE}/src/route_planner/far_planner/rviz
COPY --from=builder ${WORKSPACE}/src/exploration_planner/tare_planner/rviz ${WORKSPACE}/src/exploration_planner/tare_planner/rviz

# Copy lidar config (the entrypoint regenerates MID360_config.json here when
# LIDAR_* env vars are set)
COPY --from=builder ${WORKSPACE}/src/utilities/livox_ros_driver2/config ${WORKSPACE}/src/utilities/livox_ros_driver2/config

# Copy SLAM config files (arise_slam + FASTLIO2). The builder's src tree is
# bind-mounted read-only at /tmp/src for this single RUN — no image layer is
# created for the full source tree; only the selected config/rviz dirs are
# copied out. The arise_slam copy is deliberately best-effort
# (2>/dev/null || true) in case that package is absent.
# NOTE(review): ${WORKSPACE} expansion inside --mount flags requires a recent
# BuildKit dockerfile frontend — confirm builds pin one (e.g. via a
# `# syntax=docker/dockerfile:1` directive).
RUN --mount=from=builder,source=${WORKSPACE}/src,target=/tmp/src \
mkdir -p ${WORKSPACE}/src/slam/arise_slam_mid360 && \
cp -r /tmp/src/slam/arise_slam_mid360/config ${WORKSPACE}/src/slam/arise_slam_mid360/ 2>/dev/null || true && \
mkdir -p ${WORKSPACE}/src/slam/FASTLIO2_ROS2 && \
for pkg in fastlio2 localizer pgo hba; do \
if [ -d "/tmp/src/slam/FASTLIO2_ROS2/$pkg/config" ]; then \
mkdir -p ${WORKSPACE}/src/slam/FASTLIO2_ROS2/$pkg && \
cp -r /tmp/src/slam/FASTLIO2_ROS2/$pkg/config ${WORKSPACE}/src/slam/FASTLIO2_ROS2/$pkg/; \
fi; \
if [ -d "/tmp/src/slam/FASTLIO2_ROS2/$pkg/rviz" ]; then \
cp -r /tmp/src/slam/FASTLIO2_ROS2/$pkg/rviz ${WORKSPACE}/src/slam/FASTLIO2_ROS2/$pkg/; \
fi; \
done

# Copy system launch scripts into the workspace root.
COPY system_real_robot_with_route_planner.sh ${WORKSPACE}/
COPY system_real_robot.sh ${WORKSPACE}/
COPY system_real_robot_with_exploration_planner.sh ${WORKSPACE}/
COPY system_bagfile.sh ${WORKSPACE}/
COPY system_bagfile_with_route_planner.sh ${WORKSPACE}/
COPY system_bagfile_with_exploration_planner.sh ${WORKSPACE}/

# Copy foxglove relay scripts. --chmod sets the executable bit in the same
# layer instead of a follow-up RUN chmod (which would duplicate the files in
# an extra layer); BuildKit is already required by this Dockerfile's
# RUN --mount and TARGETARCH usage.
# NOTE(review): the context's .dockerignore must not exclude
# docker/foxglove_utility, or these COPY steps will fail.
COPY --chmod=755 docker/foxglove_utility/twist_relay.py /usr/local/bin/twist_relay.py
COPY --chmod=755 docker/foxglove_utility/goal_autonomy_relay.py /usr/local/bin/goal_autonomy_relay.py

# Create directories for maps and logs (typical volume-mount targets).
RUN mkdir -p ${WORKSPACE}/maps ${WORKSPACE}/logs

# Set up the shell environment for interactive `docker exec` bash sessions;
# the entrypoint script sources the same files for the main process.
RUN echo "source /opt/ros/${ROS_DISTRO}/setup.bash" >> ~/.bashrc && \
    echo "source ${WORKSPACE}/install/setup.bash" >> ~/.bashrc && \
    echo "export RMW_IMPLEMENTATION=rmw_fastrtps_cpp" >> ~/.bashrc

# Entrypoint script, written at build time via a BuildKit RUN here-document.
# The OUTER delimiter is quoted ('ENTRYPOINT_EOF'), so nothing below is
# expanded during the build — every ${VAR} is evaluated when the container
# starts. The INNER unquoted EOF heredoc then substitutes the runtime
# LIDAR_* environment variables into the generated MID360_config.json.
# NOTE(review): the ip/sysctl calls need NET_ADMIN (or host networking) and
# are deliberately best-effort (`|| true`); the two relay scripts are
# backgrounded, and `exec "$@"` replaces the shell so the requested command
# runs as PID 1 and receives docker stop signals.
RUN cat > /ros_entrypoint.sh <<'ENTRYPOINT_EOF'
#!/bin/bash
set -e

# Source ROS environment
source /opt/ros/${ROS_DISTRO}/setup.bash
source ${WORKSPACE}/install/setup.bash
export RMW_IMPLEMENTATION=rmw_fastrtps_cpp

# Configure lidar network interface if specified
if [ -n "${LIDAR_INTERFACE}" ] && [ -n "${LIDAR_COMPUTER_IP}" ]; then
ip addr add ${LIDAR_COMPUTER_IP}/24 dev ${LIDAR_INTERFACE} 2>/dev/null || true
ip link set ${LIDAR_INTERFACE} up 2>/dev/null || true
fi

# Generate MID360_config.json if lidar IPs are set
if [ -n "${LIDAR_COMPUTER_IP}" ] && [ -n "${LIDAR_IP}" ]; then
cat > ${WORKSPACE}/src/utilities/livox_ros_driver2/config/MID360_config.json <<EOF
{
"lidar_summary_info": { "lidar_type": 8 },
"MID360": {
"lidar_net_info": {
"cmd_data_port": 56100, "push_msg_port": 56200,
"point_data_port": 56300, "imu_data_port": 56400, "log_data_port": 56500
},
"host_net_info": {
"cmd_data_ip": "${LIDAR_COMPUTER_IP}", "cmd_data_port": 56101,
"push_msg_ip": "${LIDAR_COMPUTER_IP}", "push_msg_port": 56201,
"point_data_ip": "${LIDAR_COMPUTER_IP}", "point_data_port": 56301,
"imu_data_ip": "${LIDAR_COMPUTER_IP}", "imu_data_port": 56401,
"log_data_ip": "${LIDAR_COMPUTER_IP}", "log_data_port": 56501
}
},
"lidar_configs": [{
"ip": "${LIDAR_IP}",
"pcl_data_type": 1, "pattern_mode": 0,
"extrinsic_parameter": { "roll": 0.0, "pitch": 0.0, "yaw": 0.0, "x": 0, "y": 0, "z": 0 }
}]
}
EOF
cp ${WORKSPACE}/src/utilities/livox_ros_driver2/config/MID360_config.json \
${WORKSPACE}/install/livox_ros_driver2/share/livox_ros_driver2/config/MID360_config.json 2>/dev/null || true
echo "Generated MID360_config.json (LIDAR_IP=${LIDAR_IP}, COMPUTER_IP=${LIDAR_COMPUTER_IP})"
fi

# WiFi buffer optimization
if [ "${ENABLE_WIFI_BUFFER}" = "true" ]; then
sysctl -w net.core.rmem_max=67108864 net.core.rmem_default=67108864 2>/dev/null || true
sysctl -w net.core.wmem_max=67108864 net.core.wmem_default=67108864 2>/dev/null || true
fi

# Launch foxglove relay scripts in background
python3 /usr/local/bin/twist_relay.py &
python3 /usr/local/bin/goal_autonomy_relay.py &

exec "$@"
ENTRYPOINT_EOF
# Make the generated entrypoint executable.
RUN chmod +x /ros_entrypoint.sh

WORKDIR ${WORKSPACE}
# Exec-form ENTRYPOINT/CMD: default is an interactive bash; any command passed
# to `docker run` replaces CMD and is exec'd by the entrypoint above.
ENTRYPOINT ["/ros_entrypoint.sh"]
CMD ["bash"]
Loading