From eb2ed6cc519d12775cf214bc85b508434997bc8d Mon Sep 17 00:00:00 2001
From: Tropical <42101043+Tropix126@users.noreply.github.com>
Date: Fri, 29 Mar 2024 11:04:06 -0500
Subject: [PATCH] feat: Vision API, imu refactors

---
 packages/vex-devices/Cargo.toml          |   1 +
 packages/vex-devices/src/smart/imu.rs    | 191 ++---
 packages/vex-devices/src/smart/vision.rs | 917 +++++++++++++++++++----
 3 files changed, 835 insertions(+), 274 deletions(-)

diff --git a/packages/vex-devices/Cargo.toml b/packages/vex-devices/Cargo.toml
index 090ebcb0..d919a490 100644
--- a/packages/vex-devices/Cargo.toml
+++ b/packages/vex-devices/Cargo.toml
@@ -27,6 +27,7 @@ snafu = { version = "0.8.0", default-features = false, features = [
     "rust_1_61",
     "unstable-core-error",
 ] }
+mint = "0.5.9"
 no_std_io = { version = "0.6.0", features = ["alloc"] }
 bitflags = "2.4.2"
 
diff --git a/packages/vex-devices/src/smart/imu.rs b/packages/vex-devices/src/smart/imu.rs
index e0a421c6..640add52 100644
--- a/packages/vex-devices/src/smart/imu.rs
+++ b/packages/vex-devices/src/smart/imu.rs
@@ -1,16 +1,17 @@
 //! Inertial sensor (IMU) device.
 
 use core::{
-    pin::Pin,
-    task::{Context, Poll},
-    time::Duration,
+    marker::PhantomData, pin::Pin, task::{Context, Poll}, time::Duration
 };
 
 use bitflags::bitflags;
 use pros_core::{error::PortError, time::Instant};
 use snafu::Snafu;
 use vex_sdk::{
-    vexDeviceGetByIndex, vexDeviceImuAttitudeGet, vexDeviceImuDataRateSet, vexDeviceImuDegreesGet, vexDeviceImuHeadingGet, vexDeviceImuQuaternionGet, vexDeviceImuRawAccelGet, vexDeviceImuRawGyroGet, vexDeviceImuReset, vexDeviceImuStatusGet, V5ImuOrientationMode, V5_DeviceImuAttitude, V5_DeviceImuQuaternion, V5_DeviceImuRaw
+    vexDeviceGetByIndex, vexDeviceImuAttitudeGet, vexDeviceImuDataRateSet, vexDeviceImuDegreesGet,
+    vexDeviceImuHeadingGet, vexDeviceImuQuaternionGet, vexDeviceImuRawAccelGet,
+    vexDeviceImuRawGyroGet, vexDeviceImuReset, vexDeviceImuStatusGet, V5ImuOrientationMode,
+    V5_DeviceImuAttitude, V5_DeviceImuQuaternion, V5_DeviceImuRaw,
 };
 
 use super::{validate_port, SmartDevice, SmartDeviceInternal, SmartDeviceType, SmartPort};
 
@@ -21,7 +22,6 @@ pub struct InertialSensor {
     port: SmartPort,
     rotation_offset: f64,
     heading_offset: f64,
-    euler_offset: Euler,
 }
 
 impl InertialSensor {
@@ -43,7 +43,6 @@ impl InertialSensor {
             port,
             rotation_offset: 0.0,
             heading_offset: 0.0,
-            euler_offset: Euler::default(),
         }
     }
 
@@ -60,9 +59,13 @@ impl InertialSensor {
     pub fn status(&self) -> Result<InertialStatus, InertialError> {
         self.validate_port()?;
 
-        Ok(InertialStatus::from_bits_retain(unsafe {
-            vexDeviceImuStatusGet(self.device_handle())
-        }))
+        let bits = unsafe { vexDeviceImuStatusGet(self.device_handle()) };
+
+        if bits == InertialStatus::STATUS_ERROR {
+            return Err(InertialError::BadStatus);
+        }
+
+        Ok(InertialStatus::from_bits_retain(bits))
     }
 
     /// Check if the Inertial Sensor is currently calibrating.
@@ -111,7 +114,7 @@ impl InertialSensor {
     }
 
     /// Get a quaternion representing the Inertial Sensor’s orientation.
-    pub fn quaternion(&self) -> Result<Quaternion, InertialError> {
+    pub fn quaternion(&self) -> Result<mint::Quaternion<f64>, InertialError> {
         self.validate()?;
 
         let mut data = V5_DeviceImuQuaternion::default();
@@ -119,11 +122,14 @@ impl InertialSensor {
             vexDeviceImuQuaternionGet(self.device_handle(), &mut data);
         }
 
-        Ok(data.into())
+        Ok(mint::Quaternion {
+            v: mint::Vector3 { x: data.a, y: data.b, z: data.c },
+            s: data.d,
+        })
     }
 
-    /// Get the Euler angles (yaw, pitch, roll) representing the Inertial Sensor’s orientation.
-    pub fn euler(&self) -> Result<Euler, InertialError> {
+    /// Get the Euler angles (pitch, yaw, roll) representing the Inertial Sensor’s orientation.
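+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // Hypothetical usage sketch: assumes `sensor` is an already-created
+    /// // and calibrated `InertialSensor`.
+    /// let angles = sensor.euler()?;
+    /// // `mint::EulerAngles` stores its rotations in the `a`, `b`, and `c`
+    /// // fields: here pitch, yaw, and roll respectively, in radians.
+    /// println!("pitch: {}", angles.a.to_degrees());
+    /// ```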
+    pub fn euler(&self) -> Result<mint::EulerAngles<f64, mint::IntraXYZ>, InertialError> {
         self.validate()?;
 
         let mut data = V5_DeviceImuAttitude::default();
@@ -131,11 +137,16 @@ impl InertialSensor {
             vexDeviceImuAttitudeGet(self.device_handle(), &mut data);
         }
 
-        Ok(data.into())
+        Ok(mint::EulerAngles {
+            a: data.pitch.to_radians(),
+            b: data.yaw.to_radians(),
+            c: data.roll.to_radians(),
+            marker: PhantomData,
+        })
     }
 
     /// Get the Inertial Sensor’s raw gyroscope values.
-    pub fn gyro_rate(&self) -> Result<InertialRaw, InertialError> {
+    pub fn gyro_rate(&self) -> Result<mint::Vector3<f64>, InertialError> {
         self.validate()?;
 
         let mut data = V5_DeviceImuRaw::default();
@@ -143,11 +154,17 @@ impl InertialSensor {
             vexDeviceImuRawGyroGet(self.device_handle(), &mut data);
         }
 
-        Ok(data.into())
+        Ok(mint::Vector3 {
+            x: data.x,
+            y: data.y,
+            z: data.z,
+            // NOTE: data.w is unused in the SDK.
+            // See:
+        })
     }
 
     /// Get the Inertial Sensor’s raw accelerometer values.
-    pub fn accel(&self) -> Result<InertialRaw, InertialError> {
+    pub fn accel(&self) -> Result<mint::Vector3<f64>, InertialError> {
         self.validate()?;
 
         let mut data = V5_DeviceImuRaw::default();
@@ -155,7 +172,13 @@ impl InertialSensor {
             vexDeviceImuRawAccelGet(self.device_handle(), &mut data);
         }
 
-        Ok(data.into())
+        Ok(mint::Vector3 {
+            x: data.x,
+            y: data.y,
+            z: data.z,
+            // NOTE: data.w is unused in the SDK.
+            // See:
+        })
     }
 
     /// Resets the current reading of the Inertial Sensor’s heading to zero.
@@ -168,38 +191,6 @@ impl InertialSensor {
         self.set_rotation(Default::default())
     }
 
-    /// Reset all 3 euler values of the Inertial Sensor to 0.
-    pub fn reset_euler(&mut self) -> Result<(), InertialError> {
-        self.set_euler(Default::default())
-    }
-
-    /// Resets all values of the Inertial Sensor to 0.
-    pub fn reset(&mut self) -> Result<(), InertialError> {
-        self.reset_heading()?;
-        self.reset_rotation()?;
-        self.reset_euler()?;
-
-        Ok(())
-    }
-
-    /// Sets the current reading of the Inertial Sensor’s euler values to target euler values.
-    pub fn set_euler(&mut self, euler: Euler) -> Result<(), InertialError> {
-        self.validate()?;
-
-        let mut attitude = V5_DeviceImuAttitude::default();
-        unsafe {
-            vexDeviceImuAttitudeGet(self.device_handle(), &mut attitude);
-        }
-
-        self.euler_offset = Euler {
-            yaw: euler.yaw - attitude.yaw,
-            pitch: euler.pitch - attitude.pitch,
-            roll: euler.roll - attitude.roll,
-        };
-
-        Ok(())
-    }
-
     /// Sets the current reading of the Inertial Sensor’s rotation to target value.
     pub fn set_rotation(&mut self, rotation: f64) -> Result<(), InertialError> {
         self.validate()?;
@@ -222,7 +213,7 @@ impl InertialSensor {
 
     /// Sets the update rate of the IMU.
     ///
-    /// This duration must be above [`Self::MIN_DATA_RATE`] (5 milliseconds).
+    /// This duration should be above [`Self::MIN_DATA_RATE`] (5 milliseconds).
     pub fn set_data_rate(&mut self, data_rate: Duration) -> Result<(), InertialError> {
         self.validate()?;
 
@@ -243,83 +234,6 @@ impl SmartDevice for InertialSensor {
     }
 }
 
-/// Standard quaternion consisting of a vector defining an axis of rotation
-/// and a rotation value about the axis.
-#[derive(Default, Debug, Clone, Copy, PartialEq)]
-pub struct Quaternion {
-    /// The x-component of the axis of rotation.
-    pub x: f64,
-
-    /// The y-component of the axis of rotation.
-    pub y: f64,
-
-    /// The z-component of the axis of rotation.
-    pub z: f64,
-
-    /// The magnitude of rotation about the axis.
-    pub w: f64,
-}
-
-impl From<V5_DeviceImuQuaternion> for Quaternion {
-    fn from(value: V5_DeviceImuQuaternion) -> Self {
-        Self {
-            x: value.a,
-            y: value.b,
-            z: value.c,
-            w: value.d,
-        }
-    }
-}
-
-/// A 3-axis set of euler angles.
-#[derive(Default, Debug, Clone, Copy, PartialEq)]
-pub struct Euler {
-    /// The angle measured along the pitch axis.
-    pub pitch: f64,
-
-    /// The angle measured along the roll axis.
-    pub roll: f64,
-
-    /// The angle measured along the yaw axis.
-    pub yaw: f64,
-}
-
-impl From<V5_DeviceImuAttitude> for Euler {
-    fn from(value: V5_DeviceImuAttitude) -> Self {
-        Self {
-            pitch: value.pitch,
-            roll: value.roll,
-            yaw: value.yaw,
-        }
-    }
-}
-
-/// Represents raw data reported by the IMU.
-///
-/// This is effectively a 3D vector containing either angular velocity or
-/// acceleration values depending on the type of data requested.
-#[derive(Default, Debug, Clone, Copy, PartialEq)]
-pub struct InertialRaw {
-    /// The x component of the raw data.
-    pub x: f64,
-
-    /// The y component of the raw data.
-    pub y: f64,
-
-    /// The z component of the raw data.
-    pub z: f64,
-}
-
-impl From<V5_DeviceImuRaw> for InertialRaw {
-    fn from(value: V5_DeviceImuRaw) -> Self {
-        Self {
-            x: value.x,
-            y: value.y,
-            z: value.z,
-        }
-    }
-}
-
 /// Represents one of six possible physical IMU orientations relative
 /// to the earth's center of gravity.
 #[derive(Debug, Clone, Copy, Eq, PartialEq)]
@@ -369,6 +283,10 @@ bitflags! {
 }
 
 impl InertialStatus {
+    /// The return value of [`vexDeviceImuStatusGet`] when the device fails to report its
+    /// status bits.
+    pub const STATUS_ERROR: u32 = 0xFF;
+
     /// Returns the physical orientation of the sensor measured at calibration.
     pub fn physical_orientation(&self) -> InertialOrientation {
         match (self.bits() >> 1) & 0b111 {
@@ -435,7 +353,16 @@ impl core::future::Future for InertialCalibrateFuture {
                     return Poll::Ready(Err(InertialError::Port { source: err }));
                 } else {
                     // Get status flags from vexos.
-                    unsafe { vexDeviceImuStatusGet(vexDeviceGetByIndex((port - 1) as u32)) }
+                    let flags = unsafe {
+                        vexDeviceImuStatusGet(vexDeviceGetByIndex((port - 1) as u32))
+                    };
+
+                    // 0xFF is returned when the sensor fails to report flags.
+                    if flags == InertialStatus::STATUS_ERROR {
+                        return Poll::Ready(Err(InertialError::BadStatus));
+                    }
+
+                    flags
                 },
             );
 
@@ -462,10 +389,12 @@ impl core::future::Future for InertialCalibrateFuture {
 #[derive(Debug, Snafu)]
 /// Errors that can occur when interacting with an Inertial Sensor.
 pub enum InertialError {
-    /// The inertial sensor took longer than three seconds to calibrate.
+    /// The sensor took longer than three seconds to calibrate.
     CalibrationTimedOut,
-    /// The inertial is still calibrating.
+    /// The sensor is still calibrating.
     StillCalibrating,
+    /// The sensor failed to report its status flags (returned 0xFF).
+    BadStatus,
     #[snafu(display("{source}"), context(false))]
     /// Generic port related error.
     Port {
         /// The source of the error.
         source: PortError,
     },
 }
 
diff --git a/packages/vex-devices/src/smart/vision.rs b/packages/vex-devices/src/smart/vision.rs
index ad27ce5e..5861e6c8 100644
--- a/packages/vex-devices/src/smart/vision.rs
+++ b/packages/vex-devices/src/smart/vision.rs
@@ -1,127 +1,413 @@
-//! Vision sensor device.
+//! Vision sensor device module.
 //!
-//! Vision sensors take in a zero point at creation.
+//! This module provides an interface for interacting with the VEX Vision Sensor.
+//!
+//! # Hardware Overview
+//!
+//! The VEX Vision Sensor is a device powered by an ARM Cortex M4 and a Cortex M0 coprocessor,
+//! with a color camera for the purpose of performing object recognition. The sensor can be
+//! trained to locate objects by color. The camera module itself is very similar internally
+//! to the Pixy2 camera, and performs its own onboard image processing. Manually processing
+//! raw image data from the sensor is not currently possible.
+//!
+//! Every 200 milliseconds, the camera provides a list of the objects found matching up
+//! to seven unique [`VisionSignature`]s. Each object’s height, width, and location are provided.
+//! Multi-colored objects may also be programmed through the use of [`VisionCode`]s.
+//!
+//! The Vision Sensor has USB for a direct connection to a computer, where it can be configured
+//! using VEX's proprietary vision utility tool to generate color signatures. The Vision Sensor
+//! also supports WiFi Direct and can act as a web server, allowing a live video feed of the camera
+//! from any computer equipped with a browser and WiFi.
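+//!
+//! # Examples
+//!
+//! ```
+//! // A loose usage sketch: assumes a `peripherals` instance is in scope and
+//! // that the signature numbers were generated by VEX's Vision Utility tool.
+//! let mut sensor = VisionSensor::new(peripherals.port_1, VisionMode::ColorDetection)?;
+//!
+//! sensor.set_signature(
+//!     1,
+//!     VisionSignature::from_utility(1, 10049, 11513, 10781, -425, 1, -212, 4.1, 0),
+//! )?;
+//!
+//! for object in sensor.objects()? {
+//!     println!("{:?}", object);
+//! }
+//! ```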
 
 extern crate alloc;
+
 use alloc::vec::Vec;
+use core::time::Duration;
 
-use pros_core::{bail_errno, bail_on, error::PortError, map_errno};
-use pros_sys::{PROS_ERR, VISION_OBJECT_ERR_SIG};
+use pros_core::error::PortError;
 use snafu::Snafu;
+use vex_sdk::{
+    vexDeviceVisionBrightnessGet, vexDeviceVisionBrightnessSet, vexDeviceVisionLedColorGet,
+    vexDeviceVisionLedColorSet, vexDeviceVisionLedModeGet, vexDeviceVisionLedModeSet,
+    vexDeviceVisionModeGet, vexDeviceVisionModeSet, vexDeviceVisionObjectCountGet,
+    vexDeviceVisionObjectGet, vexDeviceVisionSignatureGet, vexDeviceVisionSignatureSet,
+    vexDeviceVisionWhiteBalanceGet, vexDeviceVisionWhiteBalanceModeGet,
+    vexDeviceVisionWhiteBalanceModeSet, vexDeviceVisionWhiteBalanceSet, vexDeviceVisionWifiModeGet,
+    vexDeviceVisionWifiModeSet, V5VisionBlockType, V5VisionLedMode, V5VisionMode, V5VisionWBMode,
+    V5VisionWifiMode, V5_DeviceVisionObject, V5_DeviceVisionRgb, V5_DeviceVisionSignature,
+};
 
-use super::{SmartDevice, SmartDeviceType, SmartPort};
+use super::{SmartDevice, SmartDeviceInternal, SmartDeviceType, SmartPort};
 use crate::color::Rgb;
 
-/// Represents a vision sensor plugged into the vex.
+/// VEX Vision Sensor
+///
+/// This struct represents a vision sensor plugged into a smart port.
 #[derive(Debug, Eq, PartialEq)]
 pub struct VisionSensor {
     port: SmartPort,
+    codes: Vec<VisionCode>,
 }
 
 impl VisionSensor {
-    /// Creates a new vision sensor.
-    pub fn new(port: SmartPort, zero: VisionZeroPoint) -> Result<Self, VisionError> {
-        unsafe {
-            bail_on!(
-                PROS_ERR,
-                pros_sys::vision_set_zero_point(port.index(), zero as _)
-            );
-        }
+    /// The horizontal resolution of the vision sensor.
+    ///
+    /// This value is based on the `VISION_FOV_WIDTH` macro constant in PROS.
+    pub const RESOLUTION_WIDTH: u16 = 316;
 
-        Ok(Self { port })
+    /// The vertical resolution of the vision sensor.
+    ///
+    /// This value is based on the `VISION_FOV_HEIGHT` macro constant in PROS.
+    pub const RESOLUTION_HEIGHT: u16 = 212;
+
+    /// The update rate of the vision sensor.
+    pub const UPDATE_RATE: Duration = Duration::from_millis(50);
+
+    /// Creates a new vision sensor on a smart port.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // Register a vision sensor on port 1, set up for color detection.
+    /// let mut sensor = VisionSensor::new(peripherals.port_1, VisionMode::ColorDetection)?;
+    /// ```
+    pub fn new(port: SmartPort, mode: VisionMode) -> Result<Self, VisionError> {
+        let mut sensor = Self {
+            port,
+            codes: Vec::new(),
+        };
+
+        sensor.set_mode(mode)?;
+
+        Ok(sensor)
     }
 
-    /// Returns the nth largest object seen by the camera.
-    pub fn nth_largest_object(&self, n: u32) -> Result<VisionObject, VisionError> {
-        unsafe { pros_sys::vision_get_by_size(self.port.index(), n).try_into() }
-    }
+    /// Adds a detection signature to the sensor's onboard memory. This signature will be used to
+    /// identify objects when using [`VisionSensor::objects`].
+    ///
+    /// The sensor can store up to 7 unique signatures, with each signature slot denoted by the
+    /// `id` parameter. If a signature with an ID matching an existing signature
+    /// on the sensor is added, then the existing signature will be overwritten with the new one.
+    ///
+    /// # Volatile Memory
+    ///
+    /// The memory on the Vision Sensor is *volatile* and will therefore be wiped when the sensor
+    /// loses power. As a result, this function should be called every time the sensor is used on
+    /// program start.
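+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // Hedged sketch: `sensor` is a `VisionSensor`, and the signature values
+    /// // are the sample numbers from the `VisionSignature::from_utility` docs.
+    /// let my_signature =
+    ///     VisionSignature::from_utility(1, 10049, 11513, 10781, -425, 1, -212, 4.1, 0);
+    /// sensor.set_signature(1, my_signature)?;
+    /// ```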
+    pub fn set_signature(&mut self, id: u8, signature: VisionSignature) -> Result<(), VisionError> {
+        if !(1..=7).contains(&id) {
+            return Err(VisionError::InvalidId);
+        }
+
+        self.validate_port()?;
+
+        let mut signature = V5_DeviceVisionSignature {
+            id,
+            uMin: signature.u_threshold.0,
+            uMax: signature.u_threshold.1,
+            uMean: signature.u_threshold.2,
+            vMin: signature.v_threshold.0,
+            vMax: signature.v_threshold.1,
+            vMean: signature.v_threshold.2,
+            range: signature.range,
+            mType: if self
+                .codes
+                .iter()
+                .any(|code| code.contains_signature(id))
+            {
+                V5VisionBlockType::kVisionTypeColorCode
+            } else {
+                V5VisionBlockType::kVisionTypeNormal
+            } as _,
+            ..Default::default()
+        };
+
+        unsafe { vexDeviceVisionSignatureSet(self.device_handle(), &mut signature) }
+
+        Ok(())
+    }
+
+    fn raw_signature(&self, id: u8) -> Result<V5_DeviceVisionSignature, VisionError> {
+        if !(1..=7).contains(&id) {
+            return Err(VisionError::InvalidId);
+        }
+
+        let mut raw_signature = V5_DeviceVisionSignature::default();
+        let read_operation = unsafe {
+            vexDeviceVisionSignatureGet(self.device_handle(), id as u32, &mut raw_signature)
+        };
+
+        // pad[0] is actually an undocumented flags field on V5_DeviceVisionSignature. If the
+        // sensor returns no flags, then it has failed to send data back.
+        //
+        // TODO: Make sure this is correct and not the PROS docs being wrong here.
+        //
+        // We also check that the read operation succeeded from the return of
+        // vexDeviceVisionSignatureGet.
+        if !read_operation || raw_signature.pad[0] == 0 {
+            return Err(VisionError::ReadingFailed);
+        }
+
+        Ok(raw_signature)
+    }
+
+    fn set_signature_type(&mut self, id: u8, sig_type: u32) -> Result<(), VisionError> {
+        let mut raw_sig = self.raw_signature(id)?;
+
+        raw_sig.mType = sig_type;
+
+        unsafe { vexDeviceVisionSignatureSet(self.device_handle(), &mut raw_sig) }
+
+        Ok(())
+    }
+
+    /// Get a signature from the sensor's onboard volatile memory.
+    pub fn signature(&self, id: u8) -> Result<VisionSignature, VisionError> {
+        self.validate_port()?;
+
+        Ok(self.raw_signature(id)?.into())
+    }
+
+    /// Registers a color code to the sensor's onboard memory. This code will be used to identify
+    /// objects when using [`VisionSensor::objects`].
+    ///
+    /// Color codes are effectively "signature groups" that the sensor will use to identify objects
+    /// whose signature colors appear next to each other.
+    ///
+    /// # Volatile Memory
+    ///
+    /// The onboard memory of the Vision Sensor is *volatile* and will therefore be wiped when the
+    /// sensor loses its power source. As a result, this function should be called every time the
+    /// sensor is used on program start.
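+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // Hedged sketch: assumes signatures were already stored in slots 1 and 2
+    /// // with `set_signature`. Tuples of signature IDs convert into a `VisionCode`.
+    /// sensor.add_code((1, 2))?;
+    /// ```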
+    pub fn add_code(&mut self, code: impl Into<VisionCode>) -> Result<(), VisionError> {
+        self.validate_port()?;
+
+        let code = code.into();
+
+        self.set_signature_type(code.0, V5VisionBlockType::kVisionTypeColorCode as _)?;
+        self.set_signature_type(code.1, V5VisionBlockType::kVisionTypeColorCode as _)?;
+        if let Some(sig_3) = code.2 {
+            self.set_signature_type(sig_3, V5VisionBlockType::kVisionTypeColorCode as _)?;
+        }
+        if let Some(sig_4) = code.3 {
+            self.set_signature_type(sig_4, V5VisionBlockType::kVisionTypeColorCode as _)?;
+        }
+        if let Some(sig_5) = code.4 {
+            self.set_signature_type(sig_5, V5VisionBlockType::kVisionTypeColorCode as _)?;
+        }
+
+        self.codes.push(code);
+
+        Ok(())
+    }
 
-    /// Get the current exposure percentage of the vision sensor. The returned result should be within 0.0 to 1.5.
-    pub fn exposure(&self) -> f32 {
-        unsafe { (pros_sys::vision_get_exposure(self.port.index()) as f32) * 1.5 / 150.0 }
-    }
+    /// Get the current brightness setting of the vision sensor as a percentage.
+    ///
+    /// The returned result should be from `0.0` (0%) to `1.0` (100%).
+    pub fn brightness(&self) -> Result<f64, VisionError> {
+        self.validate_port()?;
+
+        // SDK function gives us brightness percentage 0-100.
+        Ok(unsafe { vexDeviceVisionBrightnessGet(self.device_handle()) } as f64 / 100.0)
+    }
 
-    /// Get the current white balance of the vision sensor.
-    pub fn current_white_balance(&self) -> Rgb {
-        unsafe { (pros_sys::vision_get_white_balance(self.port.index()) as u32).into() }
-    }
+    /// Get the current white balance mode of the vision sensor.
+    pub fn white_balance(&self) -> Result<WhiteBalance, VisionError> {
+        self.validate_port()?;
+
+        let handle = self.device_handle();
+
+        Ok(
+            match unsafe { vexDeviceVisionWhiteBalanceModeGet(handle) } {
+                V5VisionWBMode::kVisionWBNormal => WhiteBalance::Auto,
+                V5VisionWBMode::kVisionWBStart => WhiteBalance::StartupAuto,
+                V5VisionWBMode::kVisionWBManual => {
+                    WhiteBalance::Manual(unsafe { vexDeviceVisionWhiteBalanceGet(handle) }.into())
+                }
+            },
+        )
+    }
 
-    /// Sets the exposure percentage of the vision sensor. Should be between 0.0 and 1.5.
-    pub fn set_exposure(&mut self, exposure: f32) {
-        unsafe {
-            pros_sys::vision_set_exposure(self.port.index(), (exposure * 150.0 / 1.5) as u8);
-        }
-    }
+    /// Sets the brightness percentage of the vision sensor. Should be between 0.0 and 1.0.
+    pub fn set_brightness(&mut self, brightness: f64) -> Result<(), VisionError> {
+        self.validate_port()?;
+
+        unsafe { vexDeviceVisionBrightnessSet(self.device_handle(), (brightness * 100.0) as u8) }
+
+        Ok(())
+    }
 
     /// Sets the white balance of the vision sensor.
-    pub fn set_white_balance(&mut self, white_balance: WhiteBalance) {
-        unsafe {
-            match white_balance {
-                WhiteBalance::Auto => pros_sys::vision_set_auto_white_balance(self.port.index(), 1),
-                WhiteBalance::Rgb(rgb) => {
-                    // Turn off automatic white balance
-                    pros_sys::vision_set_auto_white_balance(self.port.index(), 0);
-                    pros_sys::vision_set_white_balance(
-                        self.port.index(),
-                        <Rgb as Into<u32>>::into(rgb) as i32,
-                    )
-                }
-            };
-        }
-    }
+    ///
+    /// White balance can be either automatically set or manually set through an RGB color.
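+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // Hedged sketch: manually white balance against a warm light source.
+    /// sensor.set_white_balance(WhiteBalance::Manual(Rgb::new(255, 240, 200)))?;
+    ///
+    /// // Or let the sensor continuously adjust itself instead.
+    /// sensor.set_white_balance(WhiteBalance::Auto)?;
+    /// ```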
+    pub fn set_white_balance(&mut self, white_balance: WhiteBalance) -> Result<(), VisionError> {
+        self.validate_port()?;
+
+        unsafe { vexDeviceVisionWhiteBalanceModeSet(self.device_handle(), white_balance.into()) }
+
+        if let WhiteBalance::Manual(rgb) = white_balance {
+            unsafe {
+                vexDeviceVisionWhiteBalanceSet(
+                    self.device_handle(),
+                    V5_DeviceVisionRgb {
+                        red: rgb.red(),
+                        green: rgb.green(),
+                        blue: rgb.blue(),
+
+                        // Pretty sure this field does nothing, but PROS sets it to this.
+                        //
+                        // TODO: Run some hardware tests to see if this value actually influences
+                        // white balance. Based on the Pixy2 API, I doubt it and bet this is just
+                        // here for the LED setter, which uses the same type.
+                        brightness: 255,
+                    },
+                )
+            }
+        }
+
+        Ok(())
+    }
+
+    /// Configure the behavior of the LED indicator on the sensor.
+    ///
+    /// The default behavior is represented by [`LedMode::Auto`], which will display the most
+    /// prominent detected object's signature color. Alternatively, the LED can be configured to
+    /// display a single RGB color.
+    pub fn set_led_mode(&mut self, mode: LedMode) -> Result<(), VisionError> {
+        self.validate_port()?;
+
+        unsafe { vexDeviceVisionLedModeSet(self.device_handle(), mode.into()) }
+
+        if let LedMode::Manual(rgb, brightness) = mode {
+            unsafe {
+                vexDeviceVisionLedColorSet(
+                    self.device_handle(),
+                    V5_DeviceVisionRgb {
+                        red: rgb.red(),
+                        green: rgb.green(),
+                        blue: rgb.blue(),
+                        brightness: (brightness * 100.0) as u8,
+                    },
+                )
+            }
+        }
+
+        Ok(())
+    }
+
+    /// Get the user-set behavior of the LED indicator on the sensor.
+    pub fn led_mode(&self) -> Result<LedMode, VisionError> {
+        self.validate_port()?;
+
+        Ok(
+            match unsafe { vexDeviceVisionLedModeGet(self.device_handle()) } {
+                V5VisionLedMode::kVisionLedModeAuto => LedMode::Auto,
+                V5VisionLedMode::kVisionLedModeManual => {
+                    let led_color = unsafe { vexDeviceVisionLedColorGet(self.device_handle()) };
+
+                    LedMode::Manual(
+                        Rgb::new(led_color.red, led_color.green, led_color.blue),
+                        led_color.brightness as f64 / 100.0,
+                    )
+                }
+            },
+        )
+    }
+
+    /// Returns a [`Vec`] of objects detected by the sensor.
+    pub fn objects(&self) -> Result<Vec<VisionObject>, VisionError> {
+        if self.mode()? == VisionMode::Wifi {
+            return Err(VisionError::WifiMode);
+        }
+
+        let device = self.device_handle();
+
+        let object_count = unsafe { vexDeviceVisionObjectCountGet(device) } as usize;
+        let mut objects = Vec::with_capacity(object_count);
+
+        for i in 0..object_count {
+            let mut object = V5_DeviceVisionObject::default();
+
+            if unsafe { vexDeviceVisionObjectGet(device, i as u32, &mut object) } == 0 {
+                return Err(VisionError::ReadingFailed);
+            }
+
+            let object: VisionObject = object.into();
+
+            match object.source {
+                DetectionSource::Signature(_) | DetectionSource::Line => {
+                    objects.push(object);
+                }
+                DetectionSource::Code(code) => {
+                    if self.codes.contains(&code) {
+                        objects.push(object);
+                    }
+                }
+            }
+        }
+
+        Ok(objects)
+    }
+
+    /// Returns the number of objects detected by the sensor.
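+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // Hypothetical sketch: `sensor` is a configured `VisionSensor`.
+    /// if sensor.object_count()? > 0 {
+    ///     println!("something is in view!");
+    /// }
+    /// ```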
+    pub fn object_count(&self) -> Result<usize, VisionError> {
+        // NOTE: We actually can't rely on [`vexDeviceVisionObjectCountGet`], due to the way that
+        // vision codes are registered.
+        //
+        // When a code is registered, all this really does is set a bunch of normal signatures with
+        // an additional flag set (see: [`Self::set_signature_type`]). This means that if the user
+        // has multiple vision codes, we can't distinguish which objects were detected by a certain
+        // code until AFTER we get the full objects list (where we can then distinguish by
+        // [`VisionObject::source`]).
+        Ok(self.objects()?.len())
+    }
+
+    /// Sets the vision sensor's detection mode. See [`VisionMode`] for more information on what
+    /// each mode does.
+    pub fn set_mode(&mut self, mode: VisionMode) -> Result<(), VisionError> {
+        self.validate_port()?;
+
+        let device = self.device_handle();
+
+        unsafe {
+            vexDeviceVisionWifiModeSet(
+                device,
+                match mode {
+                    VisionMode::Wifi => V5VisionWifiMode::kVisionWifiModeOn,
+                    _ => V5VisionWifiMode::kVisionWifiModeOff,
+                },
+            );
+
+            vexDeviceVisionModeSet(
+                device,
+                match mode {
+                    VisionMode::ColorDetection => V5VisionMode::kVisionModeNormal,
+                    VisionMode::LineDetection => V5VisionMode::kVisionModeLineDetect,
+                    VisionMode::MixedDetection => V5VisionMode::kVisionModeMixed,
+                    // If the user requested WiFi mode, then we already enabled it with the
+                    // `vexDeviceVisionWifiModeSet` call above, so there's nothing left to do.
+                    VisionMode::Wifi => return Ok(()),
+                    VisionMode::Test => V5VisionMode::kVisionTypeTest,
+                },
+            );
+        }
+
+        Ok(())
+    }
+
+    /// Gets the current detection mode that the sensor is in.
+    pub fn mode(&self) -> Result<VisionMode, VisionError> {
+        self.validate_port()?;
+
+        let device = self.device_handle();
+
+        if unsafe { vexDeviceVisionWifiModeGet(device) } == V5VisionWifiMode::kVisionWifiModeOn {
+            return Ok(VisionMode::Wifi);
+        }
+
+        Ok(unsafe { vexDeviceVisionModeGet(device) }.into())
+    }
 }
 
 impl SmartDevice for VisionSensor {
@@ -135,95 +421,440 @@ impl SmartDevice for VisionSensor {
     }
 }
 
-//TODO: figure out how coordinates are done.
+/// A vision detection color signature.
+///
+/// Vision signatures contain information used by the vision sensor to detect objects of a certain
+/// color. These signatures are typically generated through VEX's vision utility tool rather than
+/// written by hand. For creating signatures using the utility, see [`from_utility`].
+///
+/// [`from_utility`]: VisionSignature::from_utility
+///
+/// # Format & Detection Overview
+///
+/// Vision signatures operate in a version of the Y'UV color space, specifically using the "U" and
+/// "V" chroma components for edge detection purposes. This can be seen in the `u_threshold` and
+/// `v_threshold` fields of this struct. These fields place three "threshold" (min, max, mean)
+/// values on the U and V chroma values detected by the sensor. The values are then transformed
+/// through a 3D lookup table to detect actual colors.
+///
+/// There is additionally a `range` field, which works as a scale factor or threshold for how
+/// lenient edge detection should be.
+///
+/// Signatures can additionally be grouped together into [`VisionCode`]s, which narrow the filter
+/// for object detection by requiring multiple colors to be present next to each other.
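+///
+/// # Examples
+///
+/// ```
+/// // Hedged sketch: the same sample thresholds shown in the `from_utility`
+/// // docs, expressed directly as (min, max, mean) tuples with `new`.
+/// let sig = VisionSignature::new((10049, 11513, 10781), (-425, 1, -212), 4.1);
+/// ```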
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct VisionSignature {
+    /// The (min, max, mean) values on the "U" axis.
+    ///
+    /// This defines a threshold of values for the sensor to match against a certain chroma in the
+    /// Y'UV color space - specifically on the "U" component.
+    pub u_threshold: (i32, i32, i32),
+
+    /// The (min, max, mean) values on the "V" axis.
+    ///
+    /// This defines a threshold of values for the sensor to match against a certain chroma in the
+    /// Y'UV color space - specifically on the "V" component.
+    pub v_threshold: (i32, i32, i32),
+
+    /// The signature range scale factor.
+    ///
+    /// This value effectively serves as a threshold for how lenient the sensor should be
+    /// when detecting the edges of colors. This value ranges from 0-11 in Vision Utility.
+    ///
+    /// Higher values of `range` will increase the range of brightness that the sensor will
+    /// consider to be part of the signature. Lighter/darker shades of the signature's color
+    /// will be detected more often.
+    pub range: f32,
+
+    /// The signature's flags.
+    pub flags: u8,
+}
+
+impl VisionSignature {
+    /// Create a [`VisionSignature`] from raw `(min, max, mean)` threshold tuples and a range
+    /// scale factor.
+    pub const fn new(
+        u_threshold: (i32, i32, i32),
+        v_threshold: (i32, i32, i32),
+        range: f32,
+    ) -> Self {
+        Self {
+            flags: 0,
+            u_threshold,
+            v_threshold,
+            range,
+        }
+    }
+
+    /// Create a [`VisionSignature`] using the same argument format as VEX's Vision Utility tool.
+    ///
+    /// The `id` and `signature_type` arguments are accepted for compatibility with Vision
+    /// Utility's output, but are not stored in the returned signature. The signature slot is
+    /// instead chosen when registering with [`VisionSensor::set_signature`], and the signature
+    /// type is handled automatically by [`VisionSensor::add_code`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // Register a signature for detecting red objects.
+    /// // The numbers in this signature were generated using VEX's vision utility app.
+    /// let my_signature =
+    ///     VisionSignature::from_utility(1, 10049, 11513, 10781, -425, 1, -212, 4.1, 0);
+    /// ```
+    pub const fn from_utility(
+        _id: u8, // We don't store IDs in our vision signatures.
+        u_min: i32,
+        u_max: i32,
+        u_mean: i32,
+        v_min: i32,
+        v_max: i32,
+        v_mean: i32,
+        range: f32,
+        _signature_type: u32, // This is handled automatically by [`VisionSensor::add_code`].
+    ) -> Self {
+        Self {
+            u_threshold: (u_min, u_max, u_mean),
+            v_threshold: (v_min, v_max, v_mean),
+            range,
+            flags: 0,
+        }
+    }
+}
+
+impl From<V5_DeviceVisionSignature> for VisionSignature {
+    fn from(value: V5_DeviceVisionSignature) -> Self {
+        Self {
+            u_threshold: (value.uMin, value.uMax, value.uMean),
+            v_threshold: (value.vMin, value.vMax, value.vMean),
+            range: value.range,
+            flags: value.flags,
+        }
+    }
+}
+
+/// A vision detection code.
+///
+/// Codes are a special type of detection signature that group multiple [`VisionSignature`]s
+/// together. A [`VisionCode`] can associate 2-5 color signatures together, detecting the
+/// resulting object when its color signatures are present close to each other.
+///
+/// These codes work very similarly to [Pixy2 Color Codes](https://docs.pixycam.com/wiki/doku.php?id=wiki:v2:using_color_codes).
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub struct VisionCode(
+    pub u8,
+    pub u8,
+    pub Option<u8>,
+    pub Option<u8>,
+    pub Option<u8>,
+);
+
+impl VisionCode {
+    /// Creates a new vision code.
+    ///
+    /// Two signatures are required to create a vision code, with an additional three
+    /// optional signatures.
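+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // A two-color code from signature slots 1 and 2; the remaining slots are unused.
+    /// let code = VisionCode::new(1, 2, None, None, None);
+    ///
+    /// // Tuple conversions provide a terser alternative:
+    /// let code: VisionCode = (1, 2).into();
+    /// ```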
+    pub const fn new(
+        sig_1: u8,
+        sig_2: u8,
+        sig_3: Option<u8>,
+        sig_4: Option<u8>,
+        sig_5: Option<u8>,
+    ) -> Self {
+        Self(sig_1, sig_2, sig_3, sig_4, sig_5)
+    }
+
+    /// Creates a [`VisionCode`] from a bit representation of its signature IDs.
+    pub const fn from_id(id: u16) -> Self {
+        const MASK: u16 = (1 << 3) - 1;
+
+        Self(
+            ((id >> 12) & MASK) as u8,
+            ((id >> 9) & MASK) as u8,
+            match ((id >> 6) & MASK) as u8 {
+                0 => None,
+                sig => Some(sig),
+            },
+            match ((id >> 3) & MASK) as u8 {
+                0 => None,
+                sig => Some(sig),
+            },
+            match (id & MASK) as u8 {
+                0 => None,
+                sig => Some(sig),
+            },
+        )
+    }
+
+    /// Returns `true` if a given signature ID is stored in this code.
+    pub const fn contains_signature(&self, id: u8) -> bool {
+        if self.0 == id || self.1 == id {
+            return true;
+        }
+
+        if let Some(sig_3) = self.2 {
+            if sig_3 == id {
+                return true;
+            }
+        }
+        if let Some(sig_4) = self.3 {
+            if sig_4 == id {
+                return true;
+            }
+        }
+        if let Some(sig_5) = self.4 {
+            if sig_5 == id {
+                return true;
+            }
+        }
+
+        false
+    }
+
+    /// Returns the internal ID used by the sensor to determine which signatures
+    /// belong to which code.
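+    ///
+    /// Each signature ID occupies three bits, with the first signature in the
+    /// highest bits and unused slots encoded as zero.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let code = VisionCode::from((1, 2));
+    /// assert_eq!(code.id(), 0b001_010_000_000_000);
+    /// assert_eq!(VisionCode::from_id(code.id()), code);
+    /// ```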
+    pub const fn id(&self) -> u16 {
+        let mut id: u16 = 0;
+
+        id = (id << 3) | self.0 as u16;
+        id = (id << 3) | self.1 as u16;
+        id = (id << 3) | match self.2 {
+            Some(sig) => sig as u16,
+            None => 0,
+        };
+        id = (id << 3) | match self.3 {
+            Some(sig) => sig as u16,
+            None => 0,
+        };
+        id = (id << 3) | match self.4 {
+            Some(sig) => sig as u16,
+            None => 0,
+        };
+
+        id
+    }
+}
+
+impl From<(u8, u8)> for VisionCode {
+    /// Convert a tuple of two signature IDs into a [`VisionCode`].
+    fn from(signatures: (u8, u8)) -> Self {
+        Self(signatures.0, signatures.1, None, None, None)
+    }
+}
+
+impl From<(u8, u8, u8)> for VisionCode {
+    /// Convert a tuple of three signature IDs into a [`VisionCode`].
+    fn from(signatures: (u8, u8, u8)) -> Self {
+        Self(signatures.0, signatures.1, Some(signatures.2), None, None)
+    }
+}
+
+impl From<(u8, u8, u8, u8)> for VisionCode {
+    /// Convert a tuple of four signature IDs into a [`VisionCode`].
+    fn from(signatures: (u8, u8, u8, u8)) -> Self {
+        Self(
+            signatures.0,
+            signatures.1,
+            Some(signatures.2),
+            Some(signatures.3),
+            None,
+        )
+    }
+}
+
+impl From<(u8, u8, u8, u8, u8)> for VisionCode {
+    /// Convert a tuple of five signature IDs into a [`VisionCode`].
+    fn from(signatures: (u8, u8, u8, u8, u8)) -> Self {
+        Self(
+            signatures.0,
+            signatures.1,
+            Some(signatures.2),
+            Some(signatures.3),
+            Some(signatures.4),
+        )
+    }
+}
+
+/// A detection mode used by the vision sensor when identifying objects.
 #[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
-/// An object detected by the vision sensor
-pub struct VisionObject {
-    /// The offset from the top of the object to the vision center.
-    pub top: i16,
-    /// The offset from the left of the object to the vision center.
-    pub left: i16,
-    /// The x-coordinate of the middle of the object relative to the vision center.
-    pub middle_x: i16,
-    /// The y-coordinate of the middle of the object relative to the vision center.
-    pub middle_y: i16,
-
-    /// The width of the object.
-    pub width: i16,
-    /// The height of the object.
-    pub height: i16,
-}
-
-impl TryFrom<pros_sys::vision_object_s_t> for VisionObject {
-    type Error = VisionError;
-    fn try_from(value: pros_sys::vision_object_s_t) -> Result<Self, Self::Error> {
-        if value.signature == VISION_OBJECT_ERR_SIG {
-            bail_errno!();
-            unreachable!("Errno should be non-zero")
-        }
-
-        Ok(Self {
-            top: value.top_coord,
-            left: value.left_coord,
-            middle_x: value.x_middle_coord,
-            middle_y: value.y_middle_coord,
-            width: value.width,
-            height: value.height,
-        })
-    }
-}
-
+pub enum VisionMode {
+    /// Uses color signatures and codes to identify objects in blocks.
+    #[default]
+    ColorDetection,
+
+    /// Uses line tracking to identify lines.
+    LineDetection,
+
+    /// Both color signatures and lines will be detected as objects.
+    MixedDetection,
+
+    /// Sets the sensor into "wifi mode", which disables all forms of object detection and
+    /// enables the sensor's onboard Wi-Fi hotspot for streaming camera data over a webserver.
+    ///
+    /// Once enabled, the sensor will create a wireless network with an SSID
+    /// in the format of VISION_XXXX. The sensor's camera feed is available
+    /// at `192.168.1.1`.
+    ///
+    /// This mode will be automatically disabled when connected to field control.
+    Wifi,
+
+    /// Unknown use.
+    Test,
+}
+
+impl From<V5VisionMode> for VisionMode {
+    fn from(value: V5VisionMode) -> Self {
+        match value {
+            V5VisionMode::kVisionModeNormal => Self::ColorDetection,
+            V5VisionMode::kVisionModeLineDetect => Self::LineDetection,
+            V5VisionMode::kVisionModeMixed => Self::MixedDetection,
+            V5VisionMode::kVisionTypeTest => Self::Test,
+        }
+    }
+}
+
+/// Defines a source for what method was used to detect a [`VisionObject`].
-#[repr(u32)]
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
-/// The zero point of the vision sensor.
-/// Vision object coordinates are relative to this point.
-pub enum VisionZeroPoint {
-    /// The zero point will be the top left corner of the vision sensor.
-    TopLeft,
-    /// The zero point will be the top right corner of the vision sensor.
-    Center,
+pub enum DetectionSource {
+    /// A normal vision signature not associated with a color code was used to detect this object.
+    Signature(u8),
+
+    /// Multiple signatures joined in a color code were used to detect this object.
+    Code(VisionCode),
+
+    /// Line detection was used to find this object.
+    Line,
 }
 
+/// A detected vision object.
+///
+/// This struct contains metadata about objects detected by the vision sensor. Objects are
+/// detected by calling [`VisionSensor::objects`] after adding signatures and color codes
+/// to the sensor.
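+///
+/// # Examples
+///
+/// ```
+/// // Hedged sketch: `sensor` is a `VisionSensor` with signatures registered.
+/// for object in sensor.objects()? {
+///     println!(
+///         "{}x{} object centered at ({}, {})",
+///         object.width, object.height, object.center.x, object.center.y,
+///     );
+/// }
+/// ```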
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
-/// The white balance of the vision sensor.
+pub struct VisionObject {
+    /// The signature or color code used to detect this object.
+    pub source: DetectionSource,
+
+    /// The width of the detected object's bounding box in pixels.
+    pub width: u16,
+
+    /// The height of the detected object's bounding box in pixels.
+    pub height: u16,
+
+    /// The top-left coordinate of the detected object relative to the top-left
+    /// of the camera's field of view.
+    pub offset: mint::Point2<u16>,
+
+    /// The center coordinate of the detected object relative to the top-left
+    /// of the camera's field of view.
+    pub center: mint::Point2<u16>,
+
+    /// The approximate degrees of rotation of the detected object's bounding box.
+    pub angle: u16,
+}
+
+impl From<V5_DeviceVisionObject> for VisionObject {
+    fn from(value: V5_DeviceVisionObject) -> Self {
+        Self {
+            source: match value.r#type {
+                V5VisionBlockType::kVisionTypeColorCode => {
+                    DetectionSource::Code(VisionCode::from_id(value.signature))
+                }
+                V5VisionBlockType::kVisionTypeNormal => {
+                    DetectionSource::Signature(value.signature as u8)
+                }
+                V5VisionBlockType::kVisionTypeLineDetect => DetectionSource::Line,
+            },
+            width: value.width,
+            height: value.height,
+            offset: mint::Point2 {
+                x: value.xoffset,
+                y: value.yoffset,
+            },
+            center: mint::Point2 {
+                x: value.xoffset + (value.width / 2),
+                y: value.yoffset + (value.height / 2),
+            },
+            // The SDK reports this angle in tenths of a degree.
+            angle: value.angle / 10,
+        }
+    }
+}
+
+/// Vision Sensor white balance mode.
+///
+/// Represents a white balance configuration for the vision sensor's camera.
+#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
 pub enum WhiteBalance {
-    /// Provide a specific color to balance the white balance.
-    Rgb(Rgb),
-    /// Automatically balance the white balance.
+    /// Automatic Mode
+    ///
+    /// The sensor will automatically adjust the camera's white balance, using the brightest
+    /// part of the image as a white point.
+    #[default]
     Auto,
+
+    /// "Startup" Automatic Mode
+    ///
+    /// The sensor will automatically adjust the camera's white balance, but will only perform
+    /// this adjustment once on power-on.
+    StartupAuto,
+
+    /// Manual Mode
+    ///
+    /// Allows for manual control over white balance using an RGB color.
+    Manual(Rgb),
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-/// The mode of the vision sensor led.
+impl From<WhiteBalance> for V5VisionWBMode {
+    fn from(value: WhiteBalance) -> Self {
+        match value {
+            WhiteBalance::Auto => Self::kVisionWBNormal,
+            WhiteBalance::StartupAuto => Self::kVisionWBStart,
+            WhiteBalance::Manual(_) => Self::kVisionWBManual,
+        }
+    }
+}
+
+/// Vision Sensor LED mode.
+///
+/// Represents the states that the integrated LED indicator on a vision sensor can be in.
+#[derive(Default, Debug, Clone, Copy, PartialEq)]
 pub enum LedMode {
-    /// Turn on the led with a certain color.
-    On(Rgb),
-    /// Turn off the led.
-    Off,
+    /// Automatic Mode
+    ///
+    /// When in automatic mode, the integrated LED will display the color of the most prominent
+    /// detected object's signature color.
+    #[default]
+    Auto,
+
+    /// Manual Mode
+    ///
+    /// When in manual mode, the integrated LED will display a user-set RGB color and a
+    /// brightness percentage from 0.0 to 1.0.
+    Manual(Rgb, f64),
+}
+
+impl From<LedMode> for V5VisionLedMode {
+    fn from(value: LedMode) -> Self {
+        match value {
+            LedMode::Auto => Self::kVisionLedModeAuto,
+            LedMode::Manual(_, _) => Self::kVisionLedModeManual,
+        }
+    }
+}
+
+impl From<V5_DeviceVisionRgb> for Rgb {
+    fn from(value: V5_DeviceVisionRgb) -> Self {
+        Self::new(value.red, value.green, value.blue)
+    }
 }
 
 #[derive(Debug, Snafu)]
 /// Errors that can occur when using a vision sensor.
 pub enum VisionError {
+    /// Objects cannot be read while wifi mode is enabled.
+    WifiMode,
+
+    /// The given signature ID or argument is out of range.
+    InvalidId,
+
     /// The camera could not be read.
     ReadingFailed,
-    /// The index specified was higher than the total number of objects seen by the camera.
-    IndexTooHigh,
-    /// Port already taken.
-    PortTaken,
-    #[snafu(display("{source}"), context(false))]
     /// Generic port related error.
+    #[snafu(display("{source}"), context(false))]
     Port {
         /// The source of the error.
         source: PortError,
     },
 }
 
-map_errno! {
-    VisionError {
-        EHOSTDOWN => Self::ReadingFailed,
-        EDOM => Self::IndexTooHigh,
-        EACCES => Self::PortTaken,
-    }
-    inherit PortError;
-}