adds image visualization

parent d7ab431fd6
commit 4af63a8520

inference.py (69 lines changed)
@@ -175,7 +175,30 @@ class YoloProcessing:
         return box_centers, box_scales, objness, class_pred
 
 
-    def postprocessing(self, endnodes):
+    def process_to_picture(self, endnodes, data):
+        logits = self.postprocessing(endnodes)
+        self.visualize_image(logits, data)
+
+
+    def visualize_image(self, logits, data):
+        labels = data.get_labels("data/daria_labels.json")
+        image = visualize_boxes_and_labels_on_image_array(
+            data.dataset[0],
+            logits['detection_boxes'].numpy()[0],
+            logits['detection_classes'][0],
+            logits['detection_scores'].numpy()[0],
+            labels,
+            use_normalized_coordinates=True,
+            max_boxes_to_draw=100,
+            min_score_thresh=.5,
+            agnostic_mode=False,
+            line_thickness=4)
+
+        Image.fromarray(np.uint8(image)).save('/home/maintenance/test.png')
+        print("Successfully saved image")
+
+
+    def postprocessing(self, endnodes, count):
         """
         endnodes is a list of 3 output tensors:
         endnodes[0] - stride 32 of input
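Note (not part of the diff): a minimal usage sketch of how the new helpers chain together, reusing the objects the repo's tests already build; the 0 passed as count is only a placeholder for the new parameter.

    # illustrative sketch, assuming DataHandler / HailoHandler as defined in this repo
    imageMeta = ImageMeta(640, 640, 3)
    processor = YoloProcessing(imageMeta, classes=3)
    data = DataHandler('./data', imageMeta)
    data.load_data(processor.preproc)

    hailo = HailoHandler('hef/yolov5m_daria.hef')
    out = hailo.run_hailo(data.dataset)

    logits = processor.postprocessing(out, 0)   # postprocessing now also takes a frame counter
    processor.visualize_image(logits, data)     # draws the boxes and saves /home/maintenance/test.png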
@@ -235,6 +258,7 @@ class YoloProcessing:
             max_total_size=100)
 
 
+
         # adding offset to the class prediction and cast to integer
         def translate_coco_2017_to_2014(nmsed_classes):
             return np.vectorize(COCO_17_14.get)(nmsed_classes).astype(np.int32)
@@ -242,6 +266,8 @@ class YoloProcessing:
         nmsed_classes = tf.cast(tf.add(nmsed_classes, labels_offset), tf.int16)
         nmsed_classes = translate_coco_2017_to_2014(nmsed_classes)
 
+        print(count)
+
         return {'detection_boxes': nmsed_boxes,
                 'detection_scores': nmsed_scores,
                 'detection_classes': nmsed_classes,
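Note (not part of the diff): translate_coco_2017_to_2014 maps class ids element-wise through the COCO_17_14 dict; a tiny self-contained example with a stand-in mapping (the real table is defined elsewhere in the file).

    import numpy as np
    COCO_17_14 = {1: 1, 27: 25, 90: 80}                                   # illustrative subset only
    nmsed_classes = np.array([[1, 27, 90]])
    print(np.vectorize(COCO_17_14.get)(nmsed_classes).astype(np.int32))   # [[ 1 25 80]]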
@@ -336,52 +362,42 @@ def test_async_yolo5():
 
     fps = 0
     now = time.time()
-    for i in range(1000):
+    for i in range(100):
         fps += 1
         if now + 1 < time.time():
-            print(fps)
             fps = 0
             now = time.time()
 
         hailo.hailo_input(data.dataset)
         out = None
         while(out == None):
-            time.sleep(0.01)
+            time.sleep(0.0001)
             out = hailo.hailo_output()
 
+        Thread(target=processor.postprocessing, args=[out, i]).start()
+
     hailo.stop_hailo_thread()
 
-    logits = processor.postprocessing(out)
-
-
-    labels = data.get_labels("data/daria_labels.json")
-    image = visualize_boxes_and_labels_on_image_array(
-        data.dataset[0],
-        logits['detection_boxes'].numpy()[0],
-        logits['detection_classes'][0],
-        logits['detection_scores'].numpy()[0],
-        labels,
-        use_normalized_coordinates=True,
-        max_boxes_to_draw=100,
-        min_score_thresh=.5,
-        agnostic_mode=False,
-        line_thickness=4)
-
-    Image.fromarray(np.uint8(image)).save('/home/maintenance/test.png')
-    print("Successfully saved image")
-
 
 def test_process_yolo5():
 
     imageMeta = ImageMeta(640, 640, 3)
-    processor = YoloProcessing(imageMeta, classes=3)
+    processor = YoloProcessing(imageMeta, classes=4)
     data = DataHandler('./data', imageMeta)
     data.load_data(processor.preproc)
 
     hailo = HailoHandler('hef/yolov5m_daria.hef')
-    out = hailo.run_hailo(data.dataset)
 
+    now = time.time()
+    fps = 0
+    for i in range(100):
+        fps += 1
+        if now + 1 < time.time():
+            print(fps)
+            fps = 0
+            now = time.time()
+
+        out = hailo.run_hailo(data.dataset)
     logits = processor.postprocessing(out)
 
 
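Note (not part of the diff): test_async_yolo5 now exercises a submit/poll pattern, with postprocessing pushed onto its own thread per iteration; stripped to its core, the loop body is:

    hailo.hailo_input(data.dataset)              # hand the batch to the worker thread
    out = None
    while out is None:                           # poll until the worker publishes results
        time.sleep(0.0001)
        out = hailo.hailo_output()
    Thread(target=processor.postprocessing, args=[out, i]).start()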
@@ -398,7 +414,6 @@ def test_process_yolo5():
         agnostic_mode=False,
         line_thickness=4)
 
-    Image.fromarray(np.uint8(image)).save('/home/maintenance/test.png')
     print("Successfully saved image")
 
 if __name__ == "__main__":
requirements.txt (new file, 1 line)

@@ -0,0 +1 @@
+vision_msgs
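Note (not part of the diff): vision_msgs is the ROS 2 package that provides the Detection2DArray / Detection2D / BoundingBox2D / ObjectHypothesisWithPose types imported in ros_inference.py; it is normally installed from the ROS distribution (for example apt install ros-<distro>-vision-msgs) rather than from PyPI, so this entry mostly documents the dependency.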
ros_inference.py (197 lines changed)
@@ -2,9 +2,11 @@ import json
 import os
 import io
 import time
+import copy
 
 from PIL import Image
 from threading import Thread
+from multiprocessing import Process
 
 import ipdb
 
@@ -24,6 +26,7 @@ from tensorflow.image import combined_non_max_suppression
 import rclpy
 from rclpy.node import Node
 
+from std_msgs.msg import String
 from sensor_msgs.msg import Image as ImageMsg
 from vision_msgs.msg import Detection2DArray, Detection2D, BoundingBox2D, ObjectHypothesisWithPose
 from geometry_msgs.msg import Pose2D
@@ -190,16 +193,10 @@ class YoloProcessing:
         box_scales = (raw_box_scales * 2) ** 2 * anchors_for_stride # dim [N, HxW, 3, 2]
         return box_centers, box_scales, objness, class_pred
 
-    def process_to_picture(self, endnodes, data):
-        logits = self.postprocessing(endnodes)
-        self.visualize_image(logits, data)
-
-
-    def visualize_image(self, logits, data):
-        labels = data.get_labels("data/daria_labels.json")
+    def visualize_image(self, logits, image):
+        labels = get_labels("data/daria_labels.json")
         image = visualize_boxes_and_labels_on_image_array(
-            data.dataset[0],
+            image,
             logits['detection_boxes'].numpy()[0],
             logits['detection_classes'][0],
             logits['detection_scores'].numpy()[0],
@@ -211,7 +208,7 @@ class YoloProcessing:
             line_thickness=4)
 
         Image.fromarray(np.uint8(image)).save('/home/maintenance/test.png')
-        print("Successfully saved image")
+        Image.fromarray(np.uint8(image)).show()
 
 
     def postprocessing(self, endnodes):
@@ -292,7 +289,6 @@ class YoloProcessing:
 class HailoHandler:
     def __init__(self, hef_path='hef/yolov5m.hef'):
         target = PcieDevice()
-
         self.hef = HEF(hef_path)
 
         # Configure network groups
@@ -326,6 +322,7 @@ class HailoHandler:
         self.hailo_async = True
         self.hailo_block = False
         self.input_data = None
+        self._infer_results = None
         self.hailo_thread = Thread(target=self._hailo_async)
         self.hailo_thread.start()
 
@@ -339,26 +336,25 @@ class HailoHandler:
     def _hailo_async_loop(self, infer_pipeline):
         while self.hailo_async:
             if(not self.hailo_block and type(self.input_data) != type(None)):
-                self.infer_results = None
+                self._infer_results = None
                 self.hailo_block = True
                 infer_results = infer_pipeline.infer(self.input_data)
-                self.infer_results = [infer_results[i.name] for i in self.output_vstream_infos]
+                self._infer_results = [infer_results[i.name] for i in self.output_vstream_infos]
                 self.input_data = None
                 self.hailo_block = False
 
     def hailo_input(self, input_data):
         while self.hailo_block:
-            time.sleep(0.01)
+            time.sleep(0.001)
         self.hailo_block = True
         self.input_data = input_data
         self.input_data = {self.input_vstream_info.name: input_data}
-        self.infer_results = None
         self.hailo_block = False
 
     def hailo_output(self):
         while self.hailo_block:
-            time.sleep(0.01)
-        return self.infer_results
+            time.sleep(0.001)
+        return self._infer_results
 
 
     def stop_hailo_thread(self):
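Note (not part of the diff): with the result buffer renamed to _infer_results, the caller-side pattern stays the same; in outline (handler and frame_batch are placeholder names, the methods are the ones defined in this file):

    handler.start_hailo_thread()
    handler.hailo_input(frame_batch)      # wraps the batch as {input_vstream_info.name: data}
    result = None
    while result is None:                 # hailo_output() returns None until _hailo_async_loop has run
        time.sleep(0.001)
        result = handler.hailo_output()   # the per-output-vstream list built by the worker thread
    handler.stop_hailo_thread()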
@@ -368,49 +364,98 @@ class HailoHandler:
 class HailoNode(Node):
 
     def __init__(self):
+        self._ros_init()
+        self._metadata_init()
+        self._object_init()
+        self._thread_init()
+
+    def __del__(self):
+        self.hailo.stop_hailo_thread()
+        self._thread_run = False
+        self._post_process.join()
+
+    def _ros_init(self):
         super().__init__('hailo_image_subscriber')
-        self.sub = self.create_subscription(ImageMsg, '/camera/color/image_raw', self.image_callback, 10)
+        self.sub = self.create_subscription(ImageMsg, '/r3_cam_left_0', self._image_callback, 10)
         self.pub = self.create_publisher(Detection2DArray, '/hailo_bounding_boxes', 10)
+        self.pub_ping = self.create_publisher(String, '/ping', 1)
 
-        self.bridge = CvBridge()
-        # metadata init
+    def _metadata_init(self):
+        # TODO into yaml file
         classes = 3
         self.image_meta = ImageMeta(640, 640, 3)
         self.processor = YoloProcessing(self.image_meta, classes)
+        self.hailo_hef = 'hef/yolov5m_daria.hef'
 
-        # hailo init
-        self.hailo = HailoHandler('hef/yolov5m_daria.hef')
+    def _object_init(self):
+        self.hailo = HailoHandler(self.hailo_hef)
+        self.bridge = CvBridge()
+
+    def _thread_init(self):
+        self._thread_run = True
+        self._new_input = False
+        self.yolo_image = None
         self.hailo.start_hailo_thread()
 
-    def image_callback(self, data):
-        img = self.convert(data)
-        self.image_infer(img)
+        self.detections = None
+        self.detections_new = False
+        self.detections_mutex = False
+        self._post_process = Thread(target=self._thread_postprocessing).start()
+        self.publish_thread = Thread(target=self._thread_publish).start()
 
-    def image_infer(self, image):
+    def _image_callback(self, ros_image):
+        image = self._convert_ros_to_pil(ros_image)
+        self.yolo_image = self._preprocess(image)
+        self.image_infer(self.yolo_image)
+        self._new_input = True
+
+    def _preprocess(self, image):
         image = self.processor.preproc(image)
-        dataset = self.dataset_from_image(image)
-        self.hailo.hailo_input(dataset)
+        return self._dataset_from_image(image)
 
-        out = None
-        while(out == None):
-            time.sleep(0.0001)
-            out = self.hailo.hailo_output()
+    def image_infer(self, data):
+        self.hailo.hailo_input(data)
 
-        Thread(target=self._thread_postprocessing, args=[out]).start()
-
-
-    def _thread_postprocessing(self, out):
-        logits = self.processor.postprocessing(out)
+    def _thread_postprocessing(self):
+        while self._thread_run:
+            output = None
+            while(output == None or not self._new_input):
+                time.sleep(0.001)
+                output = self.hailo.hailo_output()
+
+            self._new_input = False
+            now = time.time()
+            self.detections_mutex = True
+            self.detections = self.processor.postprocessing(output)
+            self.detections_new = True
+            self.detections_mutex = False
+            print("postprocessing time: ", time.time() - now)
+            self.processor.visualize_image(self.detections, self.yolo_image[0])
+
+    def _thread_publish(self):
+        while self._thread_run:
+            while self.detections_mutex or not self.detections_new:
+                time.sleep(0.001)
+            self._publish_detection(self.detections)
+            self.detections_new = False
+
+    def _publish_ping(self, msg="ping"):
+        s = String()
+        s.data = msg
+        self.pub_ping.publish(s)
+
+    def _publish_detection(self, detections):
 
         labels = get_labels("data/daria_labels.json")
 
         detection_array = Detection2DArray()
 
-        for bb in range(len(logits['detection_boxes'].numpy()[0])):
-            boxes = logits['detection_boxes'].numpy()[0][bb]
-            classes = logits['detection_classes'][0][bb]
-            scores = logits['detection_scores'].numpy()[0][bb]
+        for bb in range(len(detections['detection_boxes'].numpy()[0])):
+            boxes = detections['detection_boxes'].numpy()[0][bb]
+            classes = detections['detection_classes'][0][bb]
+            scores = detections['detection_scores'].numpy()[0][bb]
             if(scores > 0.01):
                 bb = BoundingBox2D()
                 bb.center = Pose2D()
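Note (not part of the diff): per frame the flow is _image_callback -> _preprocess -> image_infer (HailoHandler.hailo_input), then _thread_postprocessing polls hailo_output() and fills self.detections, and _thread_publish waits for fresh detections before publishing. The two background threads coordinate through the detections_new / detections_mutex booleans rather than a real lock; a minimal, generic sketch of that handshake (illustrative only, not the node's code):

    # standalone sketch of the flag handshake between the two worker threads
    import time
    from threading import Thread

    detections = None
    detections_new = False
    detections_mutex = False

    def post(results):
        global detections, detections_new, detections_mutex
        detections_mutex = True          # mark the shared slot as busy
        detections = results
        detections_new = True            # tell the publisher there is fresh data
        detections_mutex = False

    def publish_once():
        global detections_new
        while detections_mutex or not detections_new:
            time.sleep(0.001)            # spin until fresh, unguarded data is available
        print("publishing", detections)
        detections_new = False

    t = Thread(target=publish_once)
    t.start()
    post({"detection_boxes": []})
    t.join()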
@@ -424,8 +469,7 @@ class HailoNode(Node):
 
         self.pub.publish(detection_array)
 
-    # convert ros image to PIL image
-    def convert(self, ros_image):
+    def _convert_ros_to_pil(self, ros_image):
         try:
             img = self.bridge.imgmsg_to_cv2(ros_image, "rgb8")
             image = Image.fromarray(img)
@@ -433,7 +477,7 @@ class HailoNode(Node):
             print(e)
         return image
 
-    def dataset_from_image(self, image):
+    def _dataset_from_image(self, image):
         dataset = np.zeros((1, self.image_meta.image_height,
                             self.image_meta.image_width,
                             self.image_meta.channels),
@@ -441,77 +485,12 @@ class HailoNode(Node):
         dataset[0, :, :, :] = np.array(image)
         return dataset
 
-def test_async_yolo5():
-    imageMeta = ImageMeta(640, 640, 3)
-    processor = YoloProcessing(imageMeta, classes=3)
-    data = DataHandler('./data', imageMeta)
-    data.load_data(processor.preproc)
-
-    hailo = HailoHandler('hef/yolov5m_daria.hef')
-    hailo.start_hailo_thread()
-
-    fps = 0
-    now = time.time()
-    for i in range(100):
-        fps += 1
-        if now + 1 < time.time():
-            fps = 0
-            now = time.time()
-
-        hailo.hailo_input(data.dataset)
-        out = None
-        while(out == None):
-            time.sleep(0.0001)
-            out = hailo.hailo_output()
-
-        Thread(target=processor.postprocessing, args=[out]).start()
-
-    hailo.stop_hailo_thread()
-
-
-def test_process_yolo5():
-
-    imageMeta = ImageMeta(640, 640, 3)
-    processor = YoloProcessing(imageMeta, classes=4)
-    data = DataHandler('./data', imageMeta)
-    data.load_data(processor.preproc)
-
-    hailo = HailoHandler('hef/yolov5m_daria.hef')
-
-    now = time.time()
-    fps = 0
-    for i in range(100):
-        fps += 1
-        if now + 1 < time.time():
-            print(fps)
-            fps = 0
-            now = time.time()
-
-        out = hailo.run_hailo(data.dataset)
-    logits = processor.postprocessing(out)
-
-
-    labels = data.get_labels("data/daria_labels.json")
-    image = visualize_boxes_and_labels_on_image_array(
-        data.dataset[0],
-        logits['detection_boxes'].numpy()[0],
-        logits['detection_classes'][0],
-        logits['detection_scores'].numpy()[0],
-        labels,
-        use_normalized_coordinates=True,
-        max_boxes_to_draw=100,
-        min_score_thresh=.5,
-        agnostic_mode=False,
-        line_thickness=4)
-
-    print("Successfully saved image")
-
 def main(args=None):
     rclpy.init(args=args)
 
     hailo_node = HailoNode()
 
     rclpy.spin(hailo_node)
 
 
 if __name__ == "__main__":
     main()