Added test capability for laptop webcam
KenH2 committed Nov 2, 2023
1 parent 27dfd04 commit 2a9520f
Showing 4 changed files with 83 additions and 5 deletions.
Binary file removed people_tracking/test/assets/example.jpg
16 changes: 16 additions & 0 deletions people_tracking/test/test_with_webcam.launch
@@ -0,0 +1,16 @@
<launch>
    <node
        pkg="people_tracking"
        type="yolo.py"
        name="listener"
        output="screen"
    />

    <node
        pkg="people_tracking"
        type="webcam.py"
        name="webcam_pub"
        output="screen"
    />

</launch>
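Assuming the workspace has been built and sourced, this test should be runnable with "roslaunch people_tracking test_with_webcam.launch", which starts the webcam publisher node (webcam_pub) alongside the YOLO listener node defined above.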
59 changes: 59 additions & 0 deletions people_tracking/test/webcam.py
@@ -0,0 +1,59 @@
#!/usr/bin/env python3
# Basic ROS program to publish real-time streaming
# video from your built-in webcam
# https://automaticaddison.com/working-with-ros-and-opencv-in-ros-noetic/

# Import the necessary libraries
import rospy # Python library for ROS
from sensor_msgs.msg import Image # Image is the message type
from cv_bridge import CvBridge # Package to convert between ROS and OpenCV Images
import cv2 # OpenCV library


def publish_message():
    # Node is publishing to the video_frames topic using
    # the message type Image
    pub = rospy.Publisher('video_frames', Image, queue_size=10)

    # Tells rospy the name of the node.
    # Anonymous = True makes sure the node has a unique name. Random
    # numbers are added to the end of the name.
    rospy.init_node('video_pub_py', anonymous=True)

    # Go through the loop 30 times per second
    rate = rospy.Rate(30)  # 30 Hz

    # Create a VideoCapture object
    # The argument '0' gets the default webcam.
    cap = cv2.VideoCapture(0)

    # Used to convert between ROS and OpenCV images
    br = CvBridge()

    # While ROS is still running.
    while not rospy.is_shutdown():

        # Capture frame-by-frame
        # This method returns True/False as well
        # as the video frame.
        ret, frame = cap.read()

        if ret:
            # Print debugging information to the terminal
            # rospy.loginfo('publishing video frame')

            # Publish the image.
            # The 'cv2_to_imgmsg' method converts an OpenCV
            # image to a ROS image message
            pub.publish(br.cv2_to_imgmsg(frame))

        # Sleep just enough to maintain the desired rate
        rate.sleep()


if __name__ == '__main__':
    try:
        publish_message()
    except rospy.ROSInterruptException:
        pass
    rospy.spin()
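For a quick standalone check of the publisher above, a minimal viewer node can subscribe to the same topic. The sketch below is not part of this commit: the node name video_view_py and the cv2.imshow display are illustrative assumptions, while the video_frames topic, the Image message type and CvBridge are taken from the script above.

#!/usr/bin/env python3
# Minimal sketch (not part of this commit): display the frames
# published on the 'video_frames' topic for a quick visual check.
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
import cv2

br = CvBridge()


def show_frame(msg):
    # Convert the ROS Image message back to an OpenCV image and show it
    frame = br.imgmsg_to_cv2(msg)
    cv2.imshow('video_frames', frame)
    cv2.waitKey(1)


if __name__ == '__main__':
    rospy.init_node('video_view_py', anonymous=True)  # hypothetical node name
    rospy.Subscriber('video_frames', Image, show_frame)
    rospy.spin()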
13 changes: 8 additions & 5 deletions people_tracking/yolo.py
@@ -51,8 +51,9 @@ def __init__(self) -> None:
        self.table_class = 0 #table class defined with index 60 (person = 0)

        rospy.init_node('listener', anonymous=True)
        self.publisher = rospy.Publisher('/hero/segmented_image',Image)
        self.subscriber = rospy.Subscriber('/hero/head_rgbd_sensor/rgb/image_raw',Image , self.callback)
        self.publisher = rospy.Publisher('/hero/segmented_image', Image)
        # self.subscriber = rospy.Subscriber('/hero/head_rgbd_sensor/rgb/image_raw', Image, self.callback)
        self.subscriber = rospy.Subscriber('video_frames', Image, self.callback)

    @staticmethod
    def detect(model, frame):
@@ -81,16 +82,18 @@ def callback(self, data):
        image_message = bridge.cv2_to_imgmsg(cv_image, encoding="passthrough")
        self.publisher.publish(image_message)

    def listener():

    def listener(self):

        # In ROS, nodes are uniquely named. If two nodes with the same
        # name are launched, the previous one is kicked off. The
        # anonymous=True flag means that rospy will choose a unique
        # name for our 'listener' node so that multiple listeners can
        # run simultaneously.


        rospy.Subscriber('/hero/head_rgbd_sensor/rgb/image_raw',Image , callback)
        rospy.loginfo("tesssssssssssssss")
        # rospy.Subscriber('/hero/head_rgbd_sensor/rgb/image_raw',Image , callback)
        rospy.Subscriber('video_frames',Image , callback)

        # spin() simply keeps python from exiting until this node is stopped
        rospy.spin()
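With this change the YOLO listener subscribes to the webcam's video_frames topic instead of the robot's /hero/head_rgbd_sensor/rgb/image_raw stream, so the pipeline can be exercised on a laptop. The segmented images it republishes on /hero/segmented_image can then be inspected with any ROS image viewer, for example "rosrun image_view image_view image:=/hero/segmented_image" (assuming the image_view package is available).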
