Revamp In-App Video Streaming (#106)
* Remove dependency on web video server

* Unsubscribe before unload

---------

Co-authored-by: Amal Nanavati <amaln@cs.washington.edu>
amalnanavati authored Dec 23, 2023
1 parent 6b3e050 commit 95d06eb
Showing 14 changed files with 110 additions and 120 deletions.
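
The two bullets in the commit message summarize the new streaming path: rather than pulling MJPEG streams from web_video_server over HTTP, the app subscribes to the compressed image topics directly over the rosbridge websocket it already maintains, and it unsubscribes before the page unloads. The following is a minimal sketch of that pattern, assuming roslib as the rosbridge client; the port, throttle value, and element id are illustrative rather than taken from the app:

import ROSLIB from 'roslib'

// Connect to rosbridge; the app builds this URL from REACT_APP_ROSBRIDGE_PORT.
const ros = new ROSLIB.Ros({ url: 'ws://' + window.location.hostname + ':9090' })

// Subscribe to the compressed camera feed (CAMERA_FEED_TOPIC in Constants.js).
const cameraTopic = new ROSLIB.Topic({
  ros: ros,
  name: '/local/camera/color/image_raw/compressed',
  messageType: 'sensor_msgs/msg/CompressedImage',
  throttle_rate: 100 // ms between messages relayed by rosbridge
})

// rosbridge delivers CompressedImage.data as a base64 string, so each frame
// can be rendered by pointing an <img> element at a data URL.
const imgElement = document.getElementById('camera-feed') // illustrative id
cameraTopic.subscribe((message) => {
  imgElement.src = 'data:image/jpeg;base64,' + message.data
})

// "Unsubscribe before unload": stop rosbridge from relaying frames once the
// page is going away.
window.addEventListener('beforeunload', () => {
  cameraTopic.unsubscribe()
  ros.close()
})

The rest of the diff follows from that switch: the dummy nodes and face detection publish and consume sensor_msgs/CompressedImage on the corresponding .../compressed topics instead of raw sensor_msgs/Image, and the webVideoServerURL prop threading disappears from the React tree.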
@@ -85,6 +85,9 @@ def __init__(self):
self.camera_info_publisher = self.create_publisher(
CameraInfo, "~/camera_info", 1
)
self.aligned_depth_camera_info_publisher = self.create_publisher(
CameraInfo, "~/aligned_depth/camera_info", 1
)
if self.video is not None:
self.num_frames = 0
self.bridge = CvBridge()
@@ -174,6 +177,7 @@ def publish_frames(self):
self.compressed_image_publisher.publish(compressed_frame_msg)
self.aligned_depth_publisher.publish(depth_frame_msg)
self.camera_info_publisher.publish(self.camera_info_msg)
self.aligned_depth_camera_info_publisher.publish(self.camera_info_msg)

rate.sleep()

@@ -7,7 +7,7 @@
import numpy as np
import rclpy
from rclpy.node import Node
from sensor_msgs.msg import Image
from sensor_msgs.msg import CompressedImage
from threading import Lock


@@ -65,15 +65,20 @@ def __init__(

# Subscribe to the camera feed
self.subscription = self.create_subscription(
Image, "camera/color/image_raw", self.camera_callback, 1
CompressedImage,
"camera/color/image_raw/compressed",
self.camera_callback,
1,
)
self.subscription # prevent unused variable warning

# Create the publishers
self.publisher_results = self.create_publisher(
FaceDetection, "face_detection", 1
)
self.publisher_image = self.create_publisher(Image, "face_detection_img", 1)
self.publisher_image = self.create_publisher(
CompressedImage, "face_detection_img/compressed", 1
)

def toggle_face_detection_callback(self, request, response):
"""
@@ -150,15 +155,16 @@ def camera_callback(self, msg):
face_detection_msg.is_face_detected = is_face_detected
if is_face_detected:
# Add a dummy face marker to the sensor_msgs/Image
cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8")
cv_image = self.bridge.compressed_imgmsg_to_cv2(msg, "bgr8")
height, width, _ = cv_image.shape
cv2.circle(
cv_image,
(msg.width // 2, msg.height // 2),
msg.height // 25,
(width // 2, height // 2),
height // 25,
(0, 0, 255),
-1,
)
annotated_msg = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
annotated_msg = self.bridge.cv2_to_compressed_imgmsg(cv_image, "jpeg")
annotated_img = annotated_msg
# Publish the detected mouth center. The below is a hardcoded
# rough position of the mouth from the side staging location,
@@ -185,7 +191,6 @@ def camera_callback(self, msg):
annotated_img = msg
face_detection_msg.is_mouth_open = open_mouth_detected
self.publisher_results.publish(face_detection_msg)
self.get_logger().info("Published face detection")
self.publisher_image.publish(annotated_img)


@@ -9,7 +9,7 @@
from rclpy.action import ActionServer, CancelResponse, GoalResponse
from rclpy.executors import MultiThreadedExecutor
from rclpy.node import Node
from sensor_msgs.msg import CompressedImage, Image, RegionOfInterest
from sensor_msgs.msg import CompressedImage, RegionOfInterest
from shapely.geometry import MultiPoint
import threading
import time
@@ -34,8 +34,8 @@ def __init__(self, sleep_time=2.0, send_feedback_hz=10):

# Subscribe to the image topic, to store the latest image
self.image_subscriber = self.create_subscription(
Image,
"/camera/color/image_raw",
CompressedImage,
"/camera/color/image_raw/compressed",
self.image_callback,
1,
)
@@ -123,8 +123,8 @@ def segment_image(self, seed_point, result, segmentation_success):
with self.latest_img_msg_lock:
latest_img_msg = self.latest_img_msg
result.header = latest_img_msg.header
width = latest_img_msg.width
height = latest_img_msg.height
img = self.bridge.compressed_imgmsg_to_cv2(latest_img_msg, "bgr8")
height, width, _ = img.shape

# Sleep (dummy segmentation)
time.sleep(self.sleep_time)
@@ -175,6 +175,10 @@ def segment_image(self, seed_point, result, segmentation_success):
format="jpeg",
data=cv2.imencode(".jpg", mask_img)[1].tostring(),
)
mask_msg.rgb_image = CompressedImage(
format="jpeg",
data=cv2.imencode(".jpg", img[y_min:y_max, x_min:x_max])[1].tostring(),
)
mask_msg.item_id = "dummy_food_id_%d" % (i)
mask_msg.confidence = np.random.random()
result.detected_items.append(mask_msg)
@@ -10,8 +10,6 @@
<group if="$(var run_web_bridge)">
<!-- The ROSBridge Node -->
<include file="$(find-pkg-share rosbridge_server)/launch/rosbridge_websocket_launch.xml"/>
<!-- The ROS web_video_server -->
<node pkg="web_video_server" exec="web_video_server" name="web_video_server"/>
</group>

<!-- The dummy nodes -->
@@ -23,8 +21,9 @@
<remap from="~/compressed_image" to="/camera/color/image_raw/compressed"/>
<remap from="~/aligned_depth" to="/camera/aligned_depth_to_color/image_raw"/>
<remap from="~/camera_info" to="/camera/color/camera_info"/>
<remap from="~/aligned_depth/camera_info" to="/camera/aligned_depth_to_color/camera_info"/>
<param name="fps" value="30"/>
<param name="rgb_path" value="$(find-pkg-share feeding_web_app_ros2_test)/../data/above_plate_1_rgb.jpg"/>
<param name="rgb_path" value="$(find-pkg-share feeding_web_app_ros2_test)/../data/2022_11_01_ada_picks_up_carrots_camera_compressed_ft_tf.mp4"/>
<param name="depth_path" value="$(find-pkg-share feeding_web_app_ros2_test)/../data/above_plate_1_depth.png"/>
</node>
</group>
19 changes: 8 additions & 11 deletions feedingwebapp/src/App.jsx
@@ -22,23 +22,22 @@ import BiteSelectionPointMask from './Pages/Home/BiteSelectionUIStates/BiteSelec
* @param {APP_PAGE} appPage - The current app page. Must be one of the
* states specified in APP_PAGE.
* @param {string} rosbridgeURL - The URL of the rosbridge server.
* @param {string} webVideoServerURL - The URL of the web_video_server.
* @param {bool} debug - Whether to run it in debug mode or not.
*/
function getComponentByAppPage(appPage, rosbridgeURL, webVideoServerURL, debug) {
function getComponentByAppPage(appPage, rosbridgeURL, debug) {
switch (appPage) {
case APP_PAGE.Home:
// Must wrap a component in ROS tags for it to be able to connect to ROS
return (
<RosConnection url={rosbridgeURL} autoConnect>
<Header webVideoServerURL={webVideoServerURL} />
<Home debug={debug} webVideoServerURL={webVideoServerURL} />
<Header />
<Home debug={debug} />
</RosConnection>
)
case APP_PAGE.Settings:
return (
<RosConnection url={rosbridgeURL} autoConnect>
<Header webVideoServerURL={webVideoServerURL} />
<Header />
<Settings />
</RosConnection>
)
@@ -57,8 +56,6 @@ function App() {

// Get the rosbridge URL
const rosbridgeURL = 'ws://'.concat(window.location.hostname, ':', process.env.REACT_APP_ROSBRIDGE_PORT)
// Get the web_video_server URL
const webVideoServerURL = 'http://'.concat(window.location.hostname, ':', process.env.REACT_APP_WEB_VIDEO_SERVER_PORT)

// Get the debug flag
const debug = process.env.REACT_APP_DEBUG === 'true'
@@ -68,7 +65,7 @@
<>
<Router>
<Routes>
<Route exact path='/' element={getComponentByAppPage(appPage, rosbridgeURL, webVideoServerURL, debug)} />
<Route exact path='/' element={getComponentByAppPage(appPage, rosbridgeURL, debug)} />
<Route
exact
path='/test_ros'
@@ -83,7 +80,7 @@
path='/test_bite_selection_ui/button_overlay_selection'
element={
<RosConnection url={rosbridgeURL} autoConnect>
<Header webVideoServerURL={webVideoServerURL} />
<Header />
<BiteSelectionButtonOverlay debug={debug} />
</RosConnection>
}
@@ -93,7 +90,7 @@
path='/test_bite_selection_ui/point_mask_selection'
element={
<RosConnection url={rosbridgeURL} autoConnect>
<Header webVideoServerURL={webVideoServerURL} />
<Header />
<BiteSelectionPointMask debug={debug} />
</RosConnection>
}
@@ -103,7 +100,7 @@
path='/test_bite_selection_ui/food_name_selection'
element={
<RosConnection url={rosbridgeURL} autoConnect>
<Header webVideoServerURL={webVideoServerURL} />
<Header />
<BiteSelectionName debug={debug} />
</RosConnection>
}
4 changes: 2 additions & 2 deletions feedingwebapp/src/Pages/Constants.js
@@ -48,10 +48,10 @@ NON_MOVING_STATES.add(MEAL_STATE.U_PostMeal)
export { NON_MOVING_STATES }

// The names of the ROS topic(s)
export const CAMERA_FEED_TOPIC = '/local/camera/color/image_raw'
export const CAMERA_FEED_TOPIC = '/local/camera/color/image_raw/compressed'
export const FACE_DETECTION_TOPIC = '/face_detection'
export const FACE_DETECTION_TOPIC_MSG = 'ada_feeding_msgs/FaceDetection'
export const FACE_DETECTION_IMG_TOPIC = '/face_detection_img'
export const FACE_DETECTION_IMG_TOPIC = '/face_detection_img/compressed'

// States from which, if they fail, it is NOT okay for the user to retry the
// same action.
11 changes: 2 additions & 9 deletions feedingwebapp/src/Pages/Header/Header.jsx
@@ -4,9 +4,6 @@ import React, { useCallback, useEffect, useState } from 'react'
import Navbar from 'react-bootstrap/Navbar'
import Nav from 'react-bootstrap/Nav'
import { useMediaQuery } from 'react-responsive'
// PropTypes is used to validate that the used props are in fact passed to this
// Component
import PropTypes from 'prop-types'
// Toast generates a temporary pop-up with a timeout.
import { ToastContainer /* , toast */ } from 'react-toastify'
import 'react-toastify/dist/ReactToastify.css'
@@ -23,7 +20,7 @@
* clicking "Video", and the ToastContainer popup that specifies when the user
* cannot click Settings.
*/
const Header = (props) => {
const Header = () => {
// Create a local state variable to toggle on/off the video
// TODO: Since this local state variable is in the header, the LiveVideoModal
// continues showing even if the state changes. Is this desirable? Perhaps
@@ -166,13 +163,9 @@ const Header = (props) => {
* The LiveVideoModal toggles on and off with the Video button and shows the
* robot's live camera feed.
*/}
<LiveVideoModal webVideoServerURL={props.webVideoServerURL} show={videoShow} onHide={() => setVideoShow(false)} />
<LiveVideoModal show={videoShow} onHide={() => setVideoShow(false)} />
</>
)
}
Header.propTypes = {
// The URL of the ROS web video server
webVideoServerURL: PropTypes.string.isRequired
}

export default Header
7 changes: 3 additions & 4 deletions feedingwebapp/src/Pages/Header/LiveVideoModal.jsx
@@ -49,7 +49,8 @@ function LiveVideoModal(props) {
<Modal.Body ref={modalBodyRef} style={{ overflow: 'hidden' }}>
<center>
<VideoFeed
webVideoServerURL={props.webVideoServerURL}
topic='/local/camera/color/image_raw/compressed'
updateRateHz={10}
parent={modalBodyRef}
marginTop={margin}
marginBottom={margin}
@@ -65,9 +66,7 @@
// Whether or not the modal is visible
show: PropTypes.bool.isRequired,
// Callback function for when the modal is hidden
onHide: PropTypes.func.isRequired,
// The URL of the ROS web video server
webVideoServerURL: PropTypes.string.isRequired
onHide: PropTypes.func.isRequired
}

export default LiveVideoModal
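
The VideoFeed component rendered above is not included in this diff, but the new topic and updateRateHz props indicate its role: subscribe to a CompressedImage topic over rosbridge, redraw at roughly the requested rate, and clean up the subscription on unmount. A sketch of such a component, assuming roslib and an explicitly passed rosbridge URL (the real component presumably obtains its connection from the enclosing RosConnection context and also handles the sizing props shown above, which are omitted here):

import React, { useEffect, useRef } from 'react'
import PropTypes from 'prop-types'
import ROSLIB from 'roslib'

// Illustrative sketch only; not the actual VideoFeed implementation.
function VideoFeedSketch(props) {
  const imgRef = useRef(null)

  useEffect(() => {
    const ros = new ROSLIB.Ros({ url: props.rosbridgeURL })
    const topic = new ROSLIB.Topic({
      ros: ros,
      name: props.topic,
      messageType: 'sensor_msgs/msg/CompressedImage',
      // rosbridge throttles per subscriber, in milliseconds between messages
      throttle_rate: Math.round(1000 / props.updateRateHz)
    })
    topic.subscribe((message) => {
      // Render each compressed frame as a base64 data URL
      if (imgRef.current) {
        imgRef.current.src = 'data:image/jpeg;base64,' + message.data
      }
    })
    // Unsubscribe (and close the connection) when the component unmounts
    return () => {
      topic.unsubscribe()
      ros.close()
    }
  }, [props.rosbridgeURL, props.topic, props.updateRateHz])

  return <img ref={imgRef} alt='Live video feed' style={{ width: '100%' }} />
}
VideoFeedSketch.propTypes = {
  rosbridgeURL: PropTypes.string.isRequired,
  topic: PropTypes.string.isRequired,
  updateRateHz: PropTypes.number.isRequired
}

export default VideoFeedSketch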
12 changes: 4 additions & 8 deletions feedingwebapp/src/Pages/Home/Home.jsx
@@ -22,7 +22,6 @@ import { TIME_TO_RESET_MS } from '../Constants'
*
* @param {boolean} debug - whether to run it in debug mode (e.g., if you aren't
* simultaneously running the robot) or not
* @param {string} webVideoServerURL - the URL of the web video server
* @returns {JSX.Element}
*/
function Home(props) {
@@ -94,10 +93,10 @@ function Home(props) {
)
}
case MEAL_STATE.U_BiteSelection: {
return <BiteSelection debug={props.debug} webVideoServerURL={props.webVideoServerURL} />
return <BiteSelection debug={props.debug} />
}
case MEAL_STATE.U_PlateLocator: {
return <PlateLocator debug={props.debug} webVideoServerURL={props.webVideoServerURL} />
return <PlateLocator debug={props.debug} />
}
case MEAL_STATE.R_BiteAcquisition: {
/**
@@ -153,7 +152,7 @@ function Home(props) {
)
}
case MEAL_STATE.R_DetectingFace: {
return <DetectingFace debug={props.debug} webVideoServerURL={props.webVideoServerURL} />
return <DetectingFace debug={props.debug} />
}
case MEAL_STATE.R_MovingToMouth: {
/**
@@ -254,7 +253,6 @@ function Home(props) {
}, [
mealState,
props.debug,
props.webVideoServerURL,
biteAcquisitionActionInput,
moveAbovePlateActionInput,
moveToMouthActionInput,
@@ -278,9 +276,7 @@
* Whether to run it in debug mode (e.g., if you aren't simultaneously running
* the robot) or not
*/
debug: PropTypes.bool,
// The URL of the web video server
webVideoServerURL: PropTypes.string.isRequired
debug: PropTypes.bool
}

export default Home
Loading

0 comments on commit 95d06eb

Please sign in to comment.