diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index eb948e194..7ce58df4d 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -50,7 +50,7 @@ jobs:
- name: Install BlueView Sonar SDK
run: |
cd $GITHUB_WORKSPACE/catkin_ws/src/mil
- ./scripts/hardware_installers/install_bvtsdk --pass ${{ secrets.ZOBELISK_PASSWORD }}
+ ./scripts/hw/install_bvtsdk --pass ${{ secrets.ZOBELISK_PASSWORD }}
ls mil_common/drivers/mil_blueview_driver
echo $PWD
- name: Install system dependencies
diff --git a/NaviGator/mission_control/navigator_launch/launch/hardware/gps.launch b/NaviGator/mission_control/navigator_launch/launch/hardware/gps.launch
index 0b4347dce..e6777a6b7 100644
--- a/NaviGator/mission_control/navigator_launch/launch/hardware/gps.launch
+++ b/NaviGator/mission_control/navigator_launch/launch/hardware/gps.launch
@@ -2,7 +2,7 @@
-
+
diff --git a/NaviGator/perception/navigator_vision/nodes/classifier.py b/NaviGator/perception/navigator_vision/nodes/classifier.py
index fd0f9b1cf..5ea31ffa8 100755
--- a/NaviGator/perception/navigator_vision/nodes/classifier.py
+++ b/NaviGator/perception/navigator_vision/nodes/classifier.py
@@ -1,16 +1,23 @@
#!/usr/bin/env python3
+from __future__ import annotations
+
+import bisect
+import datetime
import math
+import random
+from collections import OrderedDict
from threading import Lock
+import cv2
import numpy as np
import rospy
import tf2_ros
+from cv_bridge import CvBridge
from image_geometry import PinholeCameraModel
from mil_msgs.msg import PerceptionObjectArray
from mil_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest
from mil_ros_tools import Image_Publisher, Image_Subscriber, rosmsg_to_numpy
from mil_vision_tools import ImageMux, rect_from_roi, roi_enclosing_points
-from sensor_msgs.msg import Image
from std_srvs.srv import SetBool, Trigger
from tf.transformations import quaternion_matrix
from vision_msgs.msg import Detection2DArray
@@ -55,7 +62,10 @@ def __init__(self):
self.last_panel_points_msg = None
self.database_client = rospy.ServiceProxy("/database/requests", ObjectDBQuery)
self.is_perception_task = False
+ self.prev_objects = OrderedDict()
+ self.prev_images = OrderedDict()
self.sub = Image_Subscriber(self.image_topic, self.image_cb)
+ self.bridge = CvBridge()
self.camera_info = self.sub.wait_for_camera_info()
self.camera_model = PinholeCameraModel()
self.camera_model.fromCameraInfo(self.camera_info)
@@ -66,7 +76,8 @@ def __init__(self):
labels=["Result", "Mask"],
)
self.debug_pub = Image_Publisher("~debug_image")
- self.last_objects = None
+ self.bbox_pub = Image_Publisher("~bbox_image")
+ self.bbox_image = None
self.last_update_time = rospy.Time.now()
self.objects_sub = rospy.Subscriber(
"/pcodar/objects",
@@ -80,9 +91,9 @@ def __init__(self):
self.process_boxes,
)
self.enabled_srv = rospy.Service("~set_enabled", SetBool, self.set_enable_srv)
- self.last_image = None
if self.is_training:
self.enabled = True
+ self.seconds_to_keep = 5
self.queue = []
if self.is_simulation:
@@ -104,6 +115,15 @@ def __init__(self):
self.pcodar_reset = rospy.ServiceProxy("/pcodar/reset", Trigger)
self.pcodar_reset()
+ self.clean_timer = rospy.Timer(rospy.Duration(10), self.clean_old_data)
+
+ def clean_old_data(self, event):
+ # Drop cached images and objects older than the retention window
+ cutoff = rospy.Time.now() - rospy.Duration(self.seconds_to_keep)
+ for k in list(self.prev_images):
+ if k < cutoff:
+ del self.prev_images[k]
+ for k in list(self.prev_objects):
+ if k < cutoff:
+ del self.prev_objects[k]
# GH-880
# @thread_lock(lock)
@@ -111,8 +131,47 @@ def set_enable_srv(self, req):
self.enabled = req.data
return {"success": True}
- def image_cb(self, msg: Image):
- self.last_image = msg
+ def image_cb(self, msg: np.ndarray):
+ stamp = self.sub.last_image_header.stamp
+ self.prev_images[stamp] = msg
+
+ def get_prev_data(
+ self,
+ stamp: rospy.Time,
+ ) -> tuple[np.ndarray, rospy.Time, PerceptionObjectArray, rospy.Time]:
+ image_keys = list(self.prev_images.keys())
+ # bisect_left gives the insertion point for stamp in the sorted keys;
+ # clamp it into range and warn when the stamp falls outside the cache
+ image_index = bisect.bisect_left(image_keys, stamp)
+ if image_index == 0 and image_keys and stamp < image_keys[0]:
+ rospy.logerr_throttle(
+ 5,
+ "Classifier looking for images that are too early to be in storage...",
+ )
+ if image_index > len(image_keys) - 1:
+ rospy.logerr_throttle(
+ 5,
+ "Classifier looking for images that are too late...",
+ )
+ image_index = len(image_keys) - 1
+ image_key = image_keys[image_index]
+ relevant_image = self.prev_images[image_key]
+ object_keys = list(self.prev_objects.keys())
+ object_index = bisect.bisect_left(object_keys, stamp)
+ if object_index == 0 and object_keys and stamp < object_keys[0]:
+ rospy.logerr_throttle(
+ 5,
+ "Classifier looking for objects that are too early to be in storage...",
+ )
+ if object_index > len(object_keys) - 1:
+ rospy.logerr_throttle(
+ 5,
+ f"Classifier looking for objects that are too late {stamp} > {object_keys[-1]}...",
+ )
+ object_index = len(object_keys) - 1
+ object_key = object_keys[object_index]
+ relevant_object = self.prev_objects[object_key]
+ return relevant_image, image_key, relevant_object, object_key
def taskinfoSubscriber(self, msg):
self.is_perception_task = msg.name == "perception"
@@ -128,15 +187,18 @@ def in_frame(self, pixel):
# GH-880
# @thread_lock(lock)
- def process_objects(self, msg):
- self.last_objects = msg
+ def process_objects(self, msg: PerceptionObjectArray):
+ time = rospy.Time.now()
+ self.prev_objects[time] = msg
def in_rect(self, point, bbox):
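+ # Accept points slightly outside the bbox (4% of the image size) to
+ # tolerate small projection/timing errors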
+ x_buf = self.camera_info.width * 0.04
+ y_buf = self.camera_info.height * 0.04
return bool(
- point[0] >= bbox.bbox.center.x - bbox.bbox.size_x / 2
- and point[1] >= bbox.bbox.center.y - bbox.bbox.size_y / 2
- and point[0] <= bbox.bbox.center.x + bbox.bbox.size_x / 2
- and point[1] <= bbox.bbox.center.y + bbox.bbox.size_y / 2,
+ point[0] >= (bbox.bbox.center.x - bbox.bbox.size_x / 2 - x_buf)
+ and point[1] >= (bbox.bbox.center.y - bbox.bbox.size_y / 2 - y_buf)
+ and point[0] <= (bbox.bbox.center.x + bbox.bbox.size_x / 2 + x_buf)
+ and point[1] <= (bbox.bbox.center.y + bbox.bbox.size_y / 2 + y_buf),
)
def distance(self, first, second):
@@ -144,24 +206,103 @@ def distance(self, first, second):
y_diff = second[1] - first[1]
return math.sqrt(x_diff * x_diff + y_diff * y_diff)
+ def _color_from_label(self, label: str) -> tuple[int, int, int]:
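+ # Seed from the label so each label maps to a stable bright color;
+ # note that hash() is salted per interpreter run, so colors can vary
+ # between runs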
+ r = random.Random(hash(label))
+ return (
+ r.randrange(180, 255),
+ r.randrange(180, 255),
+ r.randrange(180, 255),
+ )
+
+ def _draw_point_vis(self, center: tuple[int, int], label: str) -> None:
+ if self.bbox_image is None:
+ return
+ color = self._color_from_label(label)
+ cv2.circle(self.bbox_image, center, radius=3, color=color, thickness=-1)
+ cv2.putText(
+ self.bbox_image,
+ label,
+ center,
+ cv2.FONT_HERSHEY_DUPLEX,
+ 0.5,
+ color,
+ 1,
+ )
+
+ def _draw_corner_text(self, label: str, height_from_bottom: int):
+ if self.bbox_image is None:
+ return
+ x = 10
+ y = self.camera_info.height - 10 - height_from_bottom
+ cv2.putText(
+ self.bbox_image,
+ label,
+ (x, y),
+ cv2.FONT_HERSHEY_DUPLEX,
+ 0.9,
+ (0, 0, 0),
+ 1,
+ )
+
+ def _draw_bbox_vis(
+ self,
+ top_left: tuple[int, int],
+ bottom_right: tuple[int, int],
+ label: str,
+ successful: bool = True,
+ ):
+ if self.bbox_image is None:
+ return
+ color = (0, 255, 0) if successful else (0, 0, 255)
+ cv2.rectangle(self.bbox_image, top_left, bottom_right, color, 1)
+ cv2.putText(
+ self.bbox_image,
+ label,
+ (top_left[0], top_left[1] - 18),
+ cv2.FONT_HERSHEY_DUPLEX,
+ 0.5,
+ color,
+ 2,
+ )
+
def process_boxes(self, msg):
+ if not msg.detections:
+ return
if not self.enabled:
return
if self.camera_model is None:
return
- if self.last_objects is None or len(self.last_objects.objects) == 0:
- return
now = rospy.Time.now()
if now - self.last_update_time < self.update_period:
return
self.last_update_time = now
# Get Transform from ENU to optical at the time of this image
- transform = self.tf_buffer.lookup_transform(
- self.sub.last_image_header.frame_id,
- "enu",
- self.sub.last_image_header.stamp,
- timeout=rospy.Duration(1),
+ image_at_time, image_time, objects_at_time, object_time = self.get_prev_data(
+ msg.detections[0].source_img.header.stamp,
)
+ self.bbox_image = image_at_time.copy() if image_at_time is not None else None
+ if self.bbox_image is not None:
+ image_stamp = datetime.datetime.fromtimestamp(image_time.to_sec())
+ object_stamp = datetime.datetime.fromtimestamp(object_time.to_sec())
+ current_stamp = datetime.datetime.fromtimestamp(rospy.Time.now().to_sec())
+ delay = (current_stamp - image_stamp).total_seconds()
+ self._draw_corner_text(
+ f"Current time: {current_stamp} (delay: {delay:.2f}s)",
+ 50,
+ )
+ self._draw_corner_text(f"Image time used: {image_stamp}", 10)
+ self._draw_corner_text(f"LIDAR time used: {object_stamp}", 30)
+ try:
+ transform = self.tf_buffer.lookup_transform(
+ self.sub.last_image_header.frame_id,
+ "enu",
+ msg.detections[0].source_img.header.stamp,
+ # timeout=rospy.Duration(1),
+ )
+ except Exception:
+ rospy.logerr_throttle(
+ 1,
+ "Attempting to process detections, but not enough tf buffer has accumulated.",
+ )
+ return
translation = rosmsg_to_numpy(transform.transform.translation)
rotation = rosmsg_to_numpy(transform.transform.rotation)
rotation_mat = quaternion_matrix(rotation)[:3, :3]
@@ -169,7 +310,7 @@ def process_boxes(self, msg):
# Transform the center of each object into optical frame
positions_camera = [
translation + rotation_mat.dot(rosmsg_to_numpy(obj.pose.position))
- for obj in self.last_objects.objects
+ for obj in objects_at_time.objects
]
pixel_centers = [
self.camera_model.project3dToPixel(point) for point in positions_camera
@@ -182,7 +323,7 @@ def process_boxes(self, msg):
# Get a list of indices of objects who are sufficiently close and can be seen by camera
met_criteria = []
- for i in range(len(self.last_objects.objects)):
+ for i in range(len(objects_at_time.objects)):
distance = distances[i]
if (
self.in_frame(pixel_centers[i])
@@ -190,17 +331,22 @@ def process_boxes(self, msg):
and positions_camera[i][2] > 0
):
met_criteria.append(i)
- # print 'Keeping {} of {}'.format(len(met_criteria), len(self.last_objects.objects))
+ sel_object = objects_at_time.objects[i]
+ name = f"({sel_object.id})"
+ pixel_x = int(pixel_centers[i][0])
+ pixel_y = int(pixel_centers[i][1])
+ self._draw_point_vis((pixel_x, pixel_y), name)
+ # print 'Keeping {} of {}'.format(len(met_criteria), len(objects_at_time.objects))
classified = set()
# for each bounding box,check which buoy is closest to boat within pixel range of bounding box
-
- for a in msg.detections:
+ failed_detections = []
+ for detection in msg.detections:
buoys = []
for i in met_criteria:
- if self.in_rect(pixel_centers[i], a):
+ if self.in_rect(pixel_centers[i], detection):
buoys.append(i)
if len(buoys) > 0:
@@ -212,47 +358,81 @@ def process_boxes(self, msg):
closest_to_box = i
closest_to_boat = i
- classified.add(self.last_objects.objects[closest_to_box].id)
+ label = self.CLASSES[detection.results[0].id]
+ classified.add(objects_at_time.objects[closest_to_box].id)
+ bbox_left, bbox_right = (
+ round(detection.bbox.center.x - detection.bbox.size_x / 2),
+ round(detection.bbox.center.x + detection.bbox.size_x / 2),
+ )
+ # In image coordinates, y grows downward: center.y - size_y / 2 is the top
+ bbox_top, bbox_bottom = (
+ round(detection.bbox.center.y - detection.bbox.size_y / 2),
+ round(detection.bbox.center.y + detection.bbox.size_y / 2),
+ )
+ self._draw_bbox_vis(
+ (bbox_left, bbox_top),
+ (bbox_right, bbox_bottom),
+ label,
+ )
# print(
# "Object {} classified as {}".format(
- # self.last_objects.objects[closest_to_box].id,
+ # objects_at_time.objects[closest_to_box].id,
# self.CLASSES[a.results[0].id],
# )
# )
- cmd = f"{self.last_objects.objects[closest_to_box].id}={self.CLASSES[a.results[0].id]}"
+ cmd = f"{objects_at_time.objects[closest_to_box].id}={self.CLASSES[detection.results[0].id]}"
self.database_client(ObjectDBQueryRequest(cmd=cmd))
+ else:
+ failed_detections.append(detection)
+
+ for detection in failed_detections:
+ bbox_left, bbox_right = (
+ round(detection.bbox.center.x - detection.bbox.size_x / 2),
+ round(detection.bbox.center.x + detection.bbox.size_x / 2),
+ )
+ bbox_top, bbox_bottom = (
+ round(detection.bbox.center.y - detection.bbox.size_y / 2),
+ round(detection.bbox.center.y + detection.bbox.size_y / 2),
+ )
+ self._draw_bbox_vis(
+ (bbox_left, bbox_top),
+ (bbox_right, bbox_bottom),
+ "failed",
+ successful=False,
+ )
+
+ if self.bbox_pub.get_num_connections():
+ self.bbox_pub.publish(self.bbox_image)
if not self.is_perception_task:
return
- for a in met_criteria:
- if self.last_objects.objects[a].id in classified:
+ for obj_index in met_criteria:
+ if objects_at_time.objects[obj_index].id in classified:
continue
- height = self.last_objects.objects[a].scale.z
+ height = objects_at_time.objects[obj_index].scale.z
# if pixel_centers[i][0] > 1280 or pixel_centers[i][0] > 720:
# return
if height > 0.45:
- print("Reclassified as white")
print(
"Object {} classified as {}".format(
- self.last_objects.objects[a].id,
+ objects_at_time.objects[obj_index].id,
"mb_marker_buoy_white",
),
)
cmd = "{}={}".format(
- self.last_objects.objects[a].id,
+ objects_at_time.objects[obj_index].id,
"mb_marker_buoy_white",
)
self.database_client(ObjectDBQueryRequest(cmd=cmd))
else:
print(
"Object {} classified as {}".format(
- self.last_objects.objects[a].id,
+ objects_at_time.objects[obj_index].id,
"mb_round_buoy_black",
),
)
cmd = "{}={}".format(
- self.last_objects.objects[a].id,
+ objects_at_time.objects[obj_index].id,
"mb_round_buoy_black",
)
self.database_client(ObjectDBQueryRequest(cmd=cmd))
diff --git a/docs/conf.py b/docs/conf.py
index 86a815dc2..5de93c163 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -16,6 +16,8 @@
import os.path
import sys
+import pytz
+
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
@@ -89,7 +91,7 @@
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
-now = datetime.datetime.now().astimezone()
+now = datetime.datetime.now().astimezone(pytz.timezone("US/Eastern"))
# Sun, Jan 30th, 2024 11:59 PM (EST)
html_last_updated_fmt = now.strftime("%a, %b %d, %Y %I:%M %p (%Z)")
diff --git a/docs/culture.rst b/docs/culture.rst
index 7ff939054..f83eae3cc 100644
--- a/docs/culture.rst
+++ b/docs/culture.rst
@@ -17,3 +17,85 @@ Diversity, Equity, and Inclusion
--------------------------------
MIL has always been, and will always be, strongly committed to diversity,
equity, and inclusion.
+
+Glossary
+--------
+This page lists common words / acronyms used in MIL and in robotics more generally. This should help you get past some of our jargon.
+
+.. glossary::
+ :sorted:
+
+ mission
+ The program that makes high-level decisions to complete a
+ given task. Sets a :term:`waypoint` using information from :term:`perception`.
+
+ waypoint
+ A new :term:`pose` for the robot to attempt to get to
+
+ pose
+ A position (point in the world) and orientation (rotation in the world)
+
+ perception
+ The program(s) which attempt to understand what is in the environment around a robot.
+ Often uses cameras, lidar, radar, etc.
+
+ State Estimation
+ The process / program which determines where we are in the world.
+ Usually fuses multiple sensors (:term:`DVL`, GPS, IMU) to
+ produce an estimate of the robot's :term:`pose`
+
+ DVL
+ A Doppler velocity log, used primarily in underwater
+ systems to measure the linear velocity of the robot.
+
+ RobotX
+ The :term:`AUVSI`-run competition which :term:`NaviGator` competes in.
+
+ VRX
+ A competition named Virtual :term:`RobotX`. Similar to RobotX, but no physical
+ components or travel are involved.
+
+ NaviGator
+ An autonomous boat developed by MIL.
+
+ AUVSI
+ A non-profit organization which runs several competitions in which MIL participates.
+
+ Zobelisk
+ A powerful computer in MIL used to run simulation tasks. Named after former member
+ David Zobel
+
+ Shuttle
+ A powerful computer in MIL used to run simulation tasks. Has been mostly replaced
+ by :term:`Zobelisk`
+
+ node
+ A program running on a robot, usually serving a very specific purpose
+
+ alarms
+ Global Boolean state shared by many nodes, such as :term:`kill` or autonomous
+
+ kill
+ A global state indicating that the robot cannot move / actuate. This is largely
+ a safety feature. The robot can usually be killed remotely
+
+ bag
+ A file which stores a set of ROS messages received over a certain interval.
+ Used to collect data to later play back for testing / debugging
+
+ Drew Bagnell
+ An external USB hard-drive used to store :term:`bag` files.
+ Named after former member and legend Drew Bagnell, now CTO of Aurora Innovation
+
+ root
+ 1. The administrator / superuser account on Linux. Has permission to do anything on the system
+ 2. The topmost directory of a project or of the entire filesystem. For example,
+ the root of the repository is installed to ~/catkin_ws/src/mil
+
+ Food train
+ A spontaneous event where people working in MIL go out somewhere to eat
+
+ POE
+ Power over Ethernet (PoE), a family of protocols that provide
+ power over an ethernet cable, so that the same cable can be used
+ both to power a device and to transmit data.
diff --git a/docs/glossary.rst b/docs/glossary.rst
deleted file mode 100644
index b1d7ecf61..000000000
--- a/docs/glossary.rst
+++ /dev/null
@@ -1,81 +0,0 @@
-Glossary
-========
-This page lists common words / acronyms uses in MIL and robotics more generally. This should help get past some of our jargon.
-
-.. glossary::
- :sorted:
-
- mission
- The program that makes high level decision making to complete a
- given task. Sets :term:`waypoint` using information from :term:`perception`
-
- waypoint
- A new :term:`pose` for the robot to attempt to get to
-
- pose
- A position (point in the world) and orientation (rotation in the world)
-
- perception
- The program(s) which attempt to understand what is in the environment around a robot.
- Often uses cameras, lidar, radar, etc.
-
- State Estimation
- The process / program which determines where we are in the world.
- Usually fuses multiple sensors ( :term:`DVL` , gps, imu) to
- produce an estimate of the robot's :term:`pose`
-
- DVL
- A Doppler velocity log. Is used primarily in underwater
- systems to measure the linear velocity of the robot.
-
- RobotX
- The :term:`AUVSI`-run competition which :term:`NaviGator` competes in.
-
- VRX
- A competition named Virtual :term:`RobotX`. Similar to RobotX, but no physical
- components or travel are involved.
-
- NaviGator
- An autonomous boat developed by MIL.
-
- AUVSI
- A non-profit organization which runs several competitions which MIL participates in
-
- Zobelisk
- A powerful computer in MIL used to run simulation tasks. Named after former member
- David Zobel
-
- Shuttle
- A powerful computer in MIL used to run simulation tasks. Has been mostly replaced
- by :term:`Zobelisk`
-
- node
- A program running on a robot. Usually performs a very specific purpose
-
- alarms
- Global Boolean state shared by many nodes, such as :term:`kill` or autonomous
-
- kill
- A global state indicating the the robot cannot move / actuate. This is largely
- a safety feature. The robot can usually be killed remotely
-
- bag
- A file which stores a set of ROS messages received over a certain interval.
- Used to collect data to later play back for testing / debugging
-
- Drew Bagnell
- An external USB hard-drive used to store :term:`bag` files.
- Named after former member and legend Drew Bagnell, now CTO of Aurora Innovation
-
- root
- 1. The administrator / superuser account on Linux. Has permission to do anything on the system
- 2. The top most directory or a project or the entire filesystem. For example,
- the root of the repository is installed to ~/catkin_ws/src/mil
-
- Food train
- An spontaneous event where people working in MIL go out somewhere to eat
-
- POE
- Power-Over-Ethernet, a variety of protocols to provide
- power over an ethernet cable, so that the same cable can be used
- to both power and transmit data.
diff --git a/docs/index.rst b/docs/index.rst
index f90caccee..4e6035aaa 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -2,7 +2,6 @@
Machine Intelligence Lab
===============================
-
Welcome to the documentation site for the Machine Intelligence Lab at the University of Florida, a robotics lab in Gainesville, Florida. Our lab has built `several robotic systems `_ since its inception several decades ago. Our current projects include:
* **SubjuGator:** An autonomous, underwater submarine-like vehicle. Three-time champion of the AUVSI/ONR underwater competition. (`Website `_)
@@ -33,7 +32,9 @@ New Members
infrastructure/index
culture
+ purchasing
branding
+ deprecated
* **Interested software members**: Please read the :doc:`Software Getting Started Guide `.
* **Interested electrical members**: Please read the :doc:`Electrical Onboarding Guide `
@@ -43,8 +44,8 @@ Relevant Links
--------------
* **Websites:** `MIL Website `_ / `NaviGator `_ / `SubjuGator `_
* **Source:** `GitHub `_
-* **Branding:** :doc:`Branding `
-* **Culture:** :doc:`Glossary ` / :doc:`Culture ` / :doc:`Technical Design `
+* **Laboratory Resources:** :doc:`Purchasing ` / :doc:`Branding `
+* **Culture:** :doc:`Culture ` / :doc:`Technical Design `
Software Reference
------------------
@@ -52,12 +53,3 @@ Software Reference
* **Practices:** :doc:`Our Development Practices ` / :doc:`Scripts ` / :doc:`Extensions `
* **Project Procedures:** :doc:`SubjuGator ` / :doc:`VRX `
* **Getting Help:** :doc:`Development Help `
-
-Meta
-----
-.. toctree::
- :maxdepth: 1
-
- glossary
- testingprocedures.md
- deprecated
diff --git a/docs/navigator/index.rst b/docs/navigator/index.rst
index abb96368e..1ec2053aa 100644
--- a/docs/navigator/index.rst
+++ b/docs/navigator/index.rst
@@ -25,6 +25,14 @@ Procedures related to operation of the NaviGator AMS.
Testing Day Checklist
networking
+Software
+--------
+
+.. toctree::
+ :maxdepth: 2
+
+ simulating.md
+
Lessons Learned + Reflections
-----------------------------
Lessons we've learned throughout the process that are helpful for future projects.
diff --git a/docs/navigator/simulating.md b/docs/navigator/simulating.md
new file mode 100644
index 000000000..4dd8aa482
--- /dev/null
+++ b/docs/navigator/simulating.md
@@ -0,0 +1,64 @@
+# Simulating NaviGator
+
+If we had to test all of our new code on the physical build of NaviGator each time
+we made a change, we'd never be able to make changes quickly! Therefore, when
+making changes, it's recommended to simulate them before deploying NaviGator
+in a physical testing environment. Luckily, we have a great simulation environment,
+powered by VRX.
+
+## VRX
+
+[VRX](https://github.com/osrf/vrx) is a simulation environment for aquatic unmanned vehicles, developed by the
+[Open Source Robotics Foundation](https://www.openrobotics.org/). It was originally
+developed for the RobotX competition, and it has continued development in strong
+collaboration with RoboNation, the organization running RobotX. Eventually, the
+VRX competition was launched, which was an exclusively-virtual robotics competition
+that used the VRX platform as a simulator.
+
+VRX is a powerful simulator built on top of [Gazebo](https://gazebosim.org), a standard
+simulator for ROS environments. In VRX, you can customize the vehicle used in the simulator,
+as well as the surrounding environmental conditions (such as the wind, waves, fog,
+and ambient light). Also, the VRX platform typically provides world files and object
+models that are similar to actual task elements used in the biennial international RobotX
+competition, in which MIL competes.
+
+## Launching the NaviGator simulation
+
+To launch the simulation for NaviGator, run the following command in your terminal:
+```bash
+roslaunch navigator_launch simulation.launch --screen
+```
+
+There are some variables you can provide to this launch file to customize the
+simulation environment (an example follows this list). These include:
+
+* `use_mil_world`: If `true`, use world files from `navigator_gazebo` (our own
+ ROS package built for NaviGator). Otherwise, use world files from `vrx_gazebo`
+ (aka, world files provided to everyone in VRX). Default: `false`.
+* `world`: Which world file to launch. Typically, there are several world files,
+ each of which will focus on one part of the competition.
+
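+For example, to load one of our own world files (the world name below is
+illustrative; check the `navigator_gazebo` package for the worlds actually
+available):
+
+```bash
+roslaunch navigator_launch simulation.launch use_mil_world:=true world:=example_world --screen
+```
+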
+## Viewing and inspecting the environment
+
+Now, you've launched the simulation environment, but you're not actually able
+to see NaviGator -- what gives? At this point, you can run one of two commands.
+
+```bash
+gazebogui
+```
+
+`gazebogui` will launch Gazebo, which will provide a beautiful, high-level view
+of the simulation.
+
+```bash
+nviz
+```
+
+`nviz` will launch Rviz, which will give you a much more granular view of NaviGator.
+Here, you can inspect the frames and joints that make up NaviGator, the inputs of
+its sensors, etc.
+
+## Doing things
+
+At this point, it should be just like NaviGator is in the lake -- you can run
+missions, move NaviGator around using `nmove`, or take a peek at the cameras/sensors.
diff --git a/docs/purchasing.rst b/docs/purchasing.rst
new file mode 100644
index 000000000..02ed8c92e
--- /dev/null
+++ b/docs/purchasing.rst
@@ -0,0 +1,42 @@
+Purchasing
+==========
+Purchasing and shipping is integral to the proper and efficient functioning of our laboratory. Purchasing helps us to build complex projects, learn about new engineering techniques and tools, and most importantly, enjoy our time in MIL. However, if purchasing is not done correctly, annoying issues can arise! To prevent these, read below before making your first purchase.
+
+Food and Beverage
+-----------------
+* **Beverage Vendors:** When buying beverages, you must buy **Pepsi** (including Gatorade and Aquafina, notably). You **cannot** buy other products, including Coke or Dr. Pepper products.
+* **Meal Reimbursement Limits:** When buying meals, there are limits for reimbursements set by UF per meal. They are:
+ * **Breakfast**: $6/person
+ * **Lunch**: $12/person
+ * **Dinner**: $19/person
+* **Buying Pizza:** When purchasing pizza, please use coupons rather than paying full menu price for the pizzas.
+
+Submitting reimbursements
+-------------------------
+When submitting receipts for reimbursement (for any purchase), please ensure:
+
+* You have **signed** the physical receipt with your name, UFID, and signature. You can do this electronically, if it's easier.
+* You have submitted the receipt in a **timely** manner (i.e., don't submit it six months after the purchase date).
+* You provide the reason for the purchase, and if it is for food, who was in attendance at the event.
+
+If you've met the above criteria, feel free to email your receipt to Dr. Schwartz! Please include an appropriate subject.
+
+Making online purchases
+-----------------------
+When making purchases online, remember to follow practices specific to the business you're buying from.
+
+Amazon
+~~~~~~
+Only approved faculty members can purchase from Amazon. Therefore, if you'd like
+to purchase something from Amazon (such as even more monitors), the best route is
+to go through someone like Dr. Schwartz.
+
+Other Websites
+~~~~~~~~~~~~~~
+When purchasing from other websites, make sure that your purchases are **tax-exempt.**
+If you need help completing this, contact a faculty member or another member familiar
+with making tax-exempt purchases.
+
+Furthermore, when creating new accounts for websites for purchasing, ensure that
+the passwords are added to the team's Bitwarden, so that other members can access
+them once you graduate/leave/sadly get eaten by an alligator in Lake Alice.
diff --git a/docs/reference/can.rst b/docs/reference/can.rst
index 817163f78..84d6b1de1 100644
--- a/docs/reference/can.rst
+++ b/docs/reference/can.rst
@@ -290,6 +290,22 @@ byte length.
| Status) | | | |
+------------+--------------+----------------+-------------------------------------------------------------------------+
+Exceptions
+~~~~~~~~~~
+
+Exception Hierarchy
+"""""""""""""""""""
+.. currentmodule:: mil_usb_to_can.sub9
+
+.. exception_hierarchy::
+
+ - :exc:`ChecksumException`
+
+Exception List
+"""""""""""""""""""
+.. autoclass:: mil_usb_to_can.sub9.ChecksumException
+ :members:
+
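+Catching the exception when decoding raw bytes is straightforward. Below is a
+minimal sketch (``packed`` is a hypothetical buffer of bytes received from the
+board):
+
+.. code-block:: python
+
+    from mil_usb_to_can.sub9 import ChecksumException, Packet
+
+    try:
+        packet = Packet.from_bytes(packed)
+    except ChecksumException as exc:
+        print(f"Corrupted packet: {exc}")
+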
CANDeviceHandle
~~~~~~~~~~~~~~~
.. attributetable:: mil_usb_to_can.sub9.CANDeviceHandle
diff --git a/docs/software/getting_started.md b/docs/software/getting_started.md
index a40845665..0d41fa6ea 100644
--- a/docs/software/getting_started.md
+++ b/docs/software/getting_started.md
@@ -64,7 +64,7 @@ methods.
| Architecture | URL |
| ------------ | --- |
| AMD64 (most Windows computers, Intel-based Mac computers) | [focal-desktop-amd64.iso](https://releases.ubuntu.com/focal/ubuntu-20.04.6-desktop-amd64.iso) |
-| ARM64 (Apple Silicon Mac computers) | Unfortunately, Ubuntu 20.04 no longer has an ARM-compatible ISO. While we are working on finding/building a new iso image, you may not be able to install. |
+| ARM64 (Apple Silicon Mac computers) | 1. [ubuntu-20.04.5-live-server-arm64.iso](https://mil.ufl.edu/software/ubuntu-20.04.5-live-server-arm64.iso) <br> 2. After setting up the server, run: `sudo apt update && sudo apt install ubuntu-desktop && sudo reboot now` |
The following subsections cover various installation methods. Please choose the
installation option that best meets your use case. If you're not sure what the
diff --git a/docs/software/help.md b/docs/software/help.md
index f8250fa70..054698a88 100644
--- a/docs/software/help.md
+++ b/docs/software/help.md
@@ -35,7 +35,7 @@ or finding a docs page in the table of contents. The search bar will also help
you to find related code to the problem you may be encountering, as it can search
through the entire software reference.
-You may find the [Glossary](/glossary) especially useful.
+You may find the [Culture](/culture) page especially useful.
## View the docs on related classes/methods
Although you may be developing a new system/class/method (or not!), finding the
diff --git a/docs/software/index.rst b/docs/software/index.rst
index 299168e7c..266ff402b 100644
--- a/docs/software/index.rst
+++ b/docs/software/index.rst
@@ -2,31 +2,88 @@ Software
=====================
Various documentation related to practices followed by the MIL Software team.
+Getting Started
+---------------
+Welcome to the software team, take a look at the following to get started!
+
.. toctree::
:maxdepth: 1
Getting Started
devtools
Contributing Changes
- documentation_syntax
- adding_documentation
help
- zobelisk
+
+
+Technical Tools
+---------------
+Learn about some of the libraries and programs we use in MIL!
+
+.. toctree::
+ :maxdepth: 1
+
asyncio
rqt
rostest
- alarms
- preflight
+ gazebo_guide
+ Developing with Submodules
+
+
+Best Practices
+--------------
+Read below for the best practices for our team!
+
+.. toctree::
+ :maxdepth: 1
+
Bash Style Guide
C++ Style Guide
Python Style Guide
- Gazebo Guide
- Migrating to ROS Noetic
+
+
+MIL Tools
+---------
+Read about some of the tools used in MIL!
+
+.. toctree::
+ :maxdepth: 1
+
+ documentation_syntax
+ adding_documentation
+ alarms
+ preflight
+
+
+Hardware
+--------
+Check out some information about hardware used in MIL!
+
+.. toctree::
+ :maxdepth: 1
+
+ calibrating_cams
Installing NVIDIA Drivers for RTX 2080
- Installing Ubuntu 18.04 on an M-series Apple computer
+ zobelisk
+
+
+Miscellaneous
+-------------
+Everything that didn't fit neatly anywhere else!
+
+.. toctree::
+ :maxdepth: 1
+
Resetting Ubuntu Administrative Password
Network with Vehicles and Simulation Servers <../infrastructure/network.md>
- calibrating_cams
- Developing with Submodules
Optional Extensions
Helpful Scripts
+
+Archived
+--------
+Below are documentation pages that are no longer used.
+
+.. toctree::
+ :maxdepth: 1
+
+ Installing Ubuntu 18.04 on an M-series Apple computer
+ Migrating to ROS Noetic
diff --git a/docs/subjugator/index.rst b/docs/subjugator/index.rst
index 7d60efae4..14422e0ec 100644
--- a/docs/subjugator/index.rst
+++ b/docs/subjugator/index.rst
@@ -12,6 +12,7 @@ SubjuGator 8
:maxdepth: 1
design
+ testingprocedures.md
Simulating
Enabling
Cameras
diff --git a/docs/testingprocedures.md b/docs/subjugator/testingprocedures.md
similarity index 100%
rename from docs/testingprocedures.md
rename to docs/subjugator/testingprocedures.md
diff --git a/mil_common/axros b/mil_common/axros
index a86842c93..55395aa65 160000
--- a/mil_common/axros
+++ b/mil_common/axros
@@ -1 +1 @@
-Subproject commit a86842c93370a62c0dad3fba0e6ef250137edf21
+Subproject commit 55395aa65b612ea5d57bc675a15ea0446f6be51a
diff --git a/mil_common/drivers/mil_usb_to_can/mil_usb_to_can/sub9/__init__.py b/mil_common/drivers/mil_usb_to_can/mil_usb_to_can/sub9/__init__.py
index 2501f364e..fcfe4bd5c 100644
--- a/mil_common/drivers/mil_usb_to_can/mil_usb_to_can/sub9/__init__.py
+++ b/mil_common/drivers/mil_usb_to_can/mil_usb_to_can/sub9/__init__.py
@@ -1,2 +1,2 @@
from .device import CANDeviceHandle, SimulatedCANDeviceHandle
-from .packet import AckPacket, NackPacket, Packet
+from .packet import AckPacket, ChecksumException, NackPacket, Packet
diff --git a/mil_common/drivers/mil_usb_to_can/mil_usb_to_can/sub9/packet.py b/mil_common/drivers/mil_usb_to_can/mil_usb_to_can/sub9/packet.py
index 8b123bfc9..500aa0706 100644
--- a/mil_common/drivers/mil_usb_to_can/mil_usb_to_can/sub9/packet.py
+++ b/mil_common/drivers/mil_usb_to_can/mil_usb_to_can/sub9/packet.py
@@ -4,7 +4,7 @@
from dataclasses import dataclass
from enum import Enum
from functools import lru_cache
-from typing import ClassVar, get_type_hints
+from typing import ClassVar, TypeVar, get_type_hints
SYNC_CHAR_1 = 0x37
SYNC_CHAR_2 = 0x01
@@ -12,6 +12,9 @@
_packet_registry: dict[int, dict[int, type[Packet]]] = {}
+PacketSelf = TypeVar("PacketSelf", bound="Packet")
+
+
def hexify(data: bytes) -> str:
return ":".join(f"{c:02x}" for c in data)
@@ -22,12 +25,22 @@ def get_cache_hints(cls):
class ChecksumException(OSError):
+ """
+ Raised when a packet's checksum does not match its contents.
+ """
+
def __init__(
self,
packet: type[Packet],
received: tuple[int, int],
expected: tuple[int, int],
):
+ """
+ Attributes:
+ packet (Type[:class:`~.Packet`]): The packet with the invalid checksum.
+ received (Tuple[int, int]): The received Fletcher's checksum.
+ expected (Tuple[int, int]): The expected Fletcher's checksum.
+ """
super().__init__(
f"Invalid checksum in packet of type {packet.__qualname__}: received {received}, expected {expected}",
)
@@ -129,12 +142,18 @@ def __bytes__(self):
return data + struct.pack(
- def from_bytes(cls, packed: bytes) -> Packet:
+ def from_bytes(cls: type[PacketSelf], packed: bytes) -> PacketSelf:
"""
Constructs a packet from a packed packet in a :class:`bytes` object.
If a packet is found with the corresponding message and subclass ID,
- then an instance of that packet class will be returned, else :class:`Packet`
- will be returned.
+ then an instance of that packet class (or of a subclass) will be returned.
+
+ Raises:
+ ChecksumException: The checksum is invalid.
+ LookupError: No packet with the specified message and subclass IDs exists.
+
+ Returns:
+ An instance of the appropriate packet subclass.
"""
msg_id = packed[2]
subclass_id = packed[3]
@@ -150,7 +169,12 @@ def from_bytes(cls, packed: bytes) -> Packet:
cls._calculate_checksum(packed[2:-2]),
)
unpacked = struct.unpack(subclass.payload_format, payload)
- return subclass(*unpacked)
+ packet = subclass(*unpacked)
+ if not isinstance(packet, cls):
+ raise RuntimeError(
+ f"Attempted to resolve packet of type {cls.__qualname__}, but found {packet.__class__.__qualname__} for bytes: {hexify(payload)}",
+ )
+ return packet
raise LookupError(
f"Attempted to reconstruct packet with msg_id 0x{msg_id:02x} and subclass_id 0x{subclass_id:02x}, but no packet with IDs was found.",
)
diff --git a/mil_common/drivers/mil_usb_to_can/test/test_packets_sub9.py b/mil_common/drivers/mil_usb_to_can/test/test_packets_sub9.py
index ba20cd34b..7d1509800 100755
--- a/mil_common/drivers/mil_usb_to_can/test/test_packets_sub9.py
+++ b/mil_common/drivers/mil_usb_to_can/test/test_packets_sub9.py
@@ -15,6 +15,11 @@ class TestPacket(Packet, msg_id=0x47, subclass_id=0x44, payload_format="?Hd"):
example_float: float
+@dataclass
+class TestPacketTwo(Packet, msg_id=0x47, subclass_id=0x45, payload_format=""):
+ pass
+
+
class BasicApplicationPacketTest(unittest.IsolatedAsyncioTestCase):
"""
Tests basic application packet functionality.
@@ -43,6 +48,12 @@ def test_format(self):
TestPacket.from_bytes(TestPacket.__bytes__(packet)),
packet,
)
+ self.assertEqual(
+ Packet.from_bytes(TestPacket.__bytes__(packet)),
+ packet,
+ )
+ with self.assertRaises(RuntimeError):
+ TestPacketTwo.from_bytes(bytes(packet))
def test_comparisons(self):
packet = TestPacket(False, 42, 3.14)
diff --git a/mil_common/mil_missions/nodes/mission_client b/mil_common/mil_missions/nodes/mission_client
index 230547f8e..e793693cf 100755
--- a/mil_common/mil_missions/nodes/mission_client
+++ b/mil_common/mil_missions/nodes/mission_client
@@ -34,6 +34,7 @@ class MissionClientCli:
)
return
print("Available Missions:")
+ missions.sort()
for mission in missions:
print("- ", mission)
diff --git a/mil_common/perception/point_cloud_object_detection_and_recognition/src/object.cpp b/mil_common/perception/point_cloud_object_detection_and_recognition/src/object.cpp
index 48497508c..2feb13cff 100644
--- a/mil_common/perception/point_cloud_object_detection_and_recognition/src/object.cpp
+++ b/mil_common/perception/point_cloud_object_detection_and_recognition/src/object.cpp
@@ -60,7 +60,7 @@ void Object::update_msg()
// Classification field is unused, always set to empty string
msg_.classification = "";
msg_.header.frame_id = "enu";
- msg_.header.stamp = ros::Time();
+ msg_.header.stamp = ros::Time::now();
msg_.points.clear();
if (points_->empty())
diff --git a/mil_common/perception/yolov7-ros b/mil_common/perception/yolov7-ros
index dba012183..7ddd6b4f9 160000
--- a/mil_common/perception/yolov7-ros
+++ b/mil_common/perception/yolov7-ros
@@ -1 +1 @@
-Subproject commit dba012183a5d2ab1035c232fa7174debdfdf37e9
+Subproject commit 7ddd6b4f9d04fde7048ed409aa2fa97c522b9678
diff --git a/requirements.txt b/requirements.txt
index a0aa94ef3..56e4fa6a9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -47,7 +47,7 @@ black>=24.3.0
modernize>=0.8.0 # This can be removed after Noetic Migration is complete.
# Testing
-pre-commit>=2.20.0
+pre-commit>=3.5.0
pylint>=2.15.5
# YOLOv7
diff --git a/scripts/hw/install_8bitdoo_udev b/scripts/hw/install_8bitdoo_udev
new file mode 100755
index 000000000..81f544879
--- /dev/null
+++ b/scripts/hw/install_8bitdoo_udev
@@ -0,0 +1,58 @@
+#!/bin/bash
+
+# 8BitDo Ultimate Wireless Controller installation script, by Robert Michael Smith
+# Original source: https://gist.github.com/loopyd/a7ccbf37e27580ccbed859c26aa0a167
+
+# Block script from running as non-root user
+if [ "$EUID" -ne 0 ]; then
+ echo "Please run as root"
+ exit 1
+fi
+# Blacklist Nintendo Switch Pro Controller Driver
+echo blacklist hid_nintendo | tee /etc/modprobe.d/notendo.conf
+
+# Install xboxdrv
+apt-get update
+if ! dpkg -s 'xboxdrv' &>/dev/null; then
+ apt-get install -y xboxdrv
+fi
+
+# Create udev rules
+if [ ! -d '/etc/udev/rules.d' ]; then
+ mkdir -p '/etc/udev/rules.d'
+fi
+udev_rules='/etc/udev/rules.d/99-8bitdo-ultimate.rules'
+if [ -f "$udev_rules" ]; then
+ rm "$udev_rules"
+fi
+echo 'SUBSYSTEM=="usb", ACTION=="add", ATTR{idVendor}=="2dc8", ATTR{idProduct}=="3106", ATTR{manufacturer}=="8BitDo", RUN+="/bin/systemctl start 8bitdo-ultimate-xinput@2dc8:3106"' | tee "$udev_rules"
+echo 'SUBSYSTEM=="usb", ACTION=="add", ATTR{idVendor}=="2dc8", ATTR{idProduct}=="3109", ATTR{manufacturer}=="8BitDo", RUN+="/bin/systemctl stop 8bitdo-ultimate-xinput@2dc8:3106"' | tee -a "$udev_rules"
+
+# Reload udev rules
+udevadm control --reload-rules
+udevadm trigger
+
+# Create systemd service
+service_file='/etc/systemd/system/8bitdo-ultimate-xinput@.service'
+if [ -f "$service_file" ]; then
+ rm "$service_file"
+fi
+tee "$service_file" <<'EOF'
+[Unit]
+Description=8BitDo Ultimate Controller XInput mode xboxdrv daemon
+
+[Service]
+Type=simple
+ExecStart=/usr/bin/xboxdrv --mimic-xpad --silent --type xbox360 --device-by-id %i --force-feedback --detach-kernel-driver
+EOF
+
+systemctl daemon-reload
+
+# Now install jstest-gtk to test the controller, which also installs all the joystick dependencies
+# This fixes:
+# - /dev/input/js0 not found
+if ! dpkg -s 'jstest-gtk' &>/dev/null; then
+ apt-get install -y jstest-gtk
+fi
+
+# Now open jstest-gtk and test the controller, remap the axis / buttons, then exit
+# Save the configuration using the command:
+# sudo jscal-store /dev/input/js0
diff --git a/scripts/hardware_installers/install_bvtsdk b/scripts/hw/install_bvtsdk
similarity index 100%
rename from scripts/hardware_installers/install_bvtsdk
rename to scripts/hw/install_bvtsdk
diff --git a/scripts/hardware_installers/install_flycap b/scripts/hw/install_flycap
similarity index 100%
rename from scripts/hardware_installers/install_flycap
rename to scripts/hw/install_flycap
diff --git a/scripts/hardware_installers/install_sylphase_sonar b/scripts/hw/install_sylphase_sonar
similarity index 100%
rename from scripts/hardware_installers/install_sylphase_sonar
rename to scripts/hw/install_sylphase_sonar
diff --git a/scripts/hardware_installers/install_udev_rules b/scripts/hw/install_udev_rules
similarity index 100%
rename from scripts/hardware_installers/install_udev_rules
rename to scripts/hw/install_udev_rules
diff --git a/scripts/setup.bash b/scripts/setup.bash
index 4301a353b..fa3d442af 100755
--- a/scripts/setup.bash
+++ b/scripts/setup.bash
@@ -133,10 +133,66 @@ cm() {
fi
}
+# potentially borrowed from forrest
autopush() {
git push origin +"${1:-HEAD}":refs/heads/autopush-cameron-"$(uuidgen --random | cut -c1-8)"-citmp
}
+# uhhh maybe also borrowed from forrest
+cw() {
+ git add -u
+ git commit -m "work"
+}
+
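+# diff against the fork point with main/master ("diff merge-base")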
+dmb() {
+ git diff "$(git merge-base --fork-point "$(git branch -l main master --format '%(refname:short)')" HEAD)"
+}
+
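+# print this machine's IP on the .37 subnet (e.g. 192.168.37.x)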
+subnet_ip() {
+ ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){2}37\.[0-9]*' | grep -v '127.0.0.1'
+}
+
+rosdisconnect() {
+ unset ROS_IP
+ unset ROS_MASTER_URI
+ echo "Disconnected! New values:"
+ echo "ROS_IP=$ROS_IP"
+ echo "ROS_MASTER_URI=$ROS_MASTER_URI"
+}
+
+rosconnect() {
+ # --no-subnet flag to avoid checking for the subnet
+ # Usage: rosconnect --no-subnet <ros_ip> <ros_master_ip>
+ if [[ $1 == "--no-subnet" ]]; then
+ if [[ -z $2 || -z $3 ]]; then
+ echo "Usage: rosconnect --no-subnet "
+ return
+ fi
+ export ROS_IP=${2}
+ export ROS_MASTER_URI="http://${3}:11311"
+ echo "ROS_IP=$ROS_IP"
+ echo "ROS_MASTER_URI=$ROS_MASTER_URI"
+ return
+ fi
+ if [[ -n $(subnet_ip) ]]; then
+ my_ip=$(subnet_ip)
+ export ROS_IP=$my_ip
+ export ROS_MASTER_URI="http://${1}:11311"
+ echo "ROS_IP=$ROS_IP"
+ echo "ROS_MASTER_URI=$ROS_MASTER_URI"
+ else
+ echo "No 37 subnet IP found, not setting ROS_IP or ROS_MASTER_URI"
+ fi
+}
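+
+# Example usage (addresses are illustrative):
+#   rosconnect 192.168.37.82                          # auto-detect ROS_IP on the 37 subnet
+#   rosconnect --no-subnet 192.168.37.5 192.168.37.82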
+
+rosnavconnect() {
+ rosconnect "192.168.37.82"
+}
+
+rossubconnect() {
+ rosconnect "192.168.37.60"
+}
+
alias xbox=startxbox
# PYTHONPATH modifications