diff --git a/examples/python-dataflow/object_detection.py b/examples/python-dataflow/object_detection.py index e74feadd..445d65ea 100755 --- a/examples/python-dataflow/object_detection.py +++ b/examples/python-dataflow/object_detection.py @@ -13,23 +13,23 @@ model = torch.hub.load("ultralytics/yolov5", "yolov5n") node = Node() for event in node: - match event["type"]: - case "INPUT": - match event["id"]: - case "image": - print("[object detection] received image input") - frame = event["value"].to_numpy() - frame = cv2.imdecode(frame, -1) - frame = frame[:, :, ::-1] # OpenCV image (BGR to RGB) - results = model(frame) # includes NMS - arrays = np.array(results.xyxy[0].cpu()).tobytes() + event_type = event["type"] + if event_type == "INPUT": + event_id = event["id"] + if event_id == "image": + print("[object detection] received image input") + frame = event["value"].to_numpy() + frame = cv2.imdecode(frame, -1) + frame = frame[:, :, ::-1] # OpenCV image (BGR to RGB) + results = model(frame) # includes NMS + arrays = np.array(results.xyxy[0].cpu()).tobytes() - node.send_output("bbox", arrays, event["metadata"]) - case other: - print("[object detection] ignoring unexpected input:", other) - case "STOP": - print("[object detection] received stop") - case "ERROR": - print("[object detection] error: ", event["error"]) - case other: - print("[object detection] received unexpected event:", other) + node.send_output("bbox", arrays, event["metadata"]) + else: + print("[object detection] ignoring unexpected input:", event_id) + elif event_type == "STOP": + print("[object detection] received stop") + elif event_type == "ERROR": + print("[object detection] error: ", event["error"]) + else: + print("[object detection] received unexpected event:", event_type) diff --git a/examples/python-dataflow/plot.py b/examples/python-dataflow/plot.py index 55f96683..e28d8016 100755 --- a/examples/python-dataflow/plot.py +++ b/examples/python-dataflow/plot.py @@ -83,16 +83,16 @@ plotter = 
Plotter() node = Node() for event in node: - match event["type"]: - case "INPUT": - status = plotter.on_input(event) - match status: - case DoraStatus.CONTINUE: - pass - case DoraStatus.STOP: - print("plotter returned stop status") - break - case "STOP": - print("received stop") - case other: - print("received unexpected event:", other) + event_type = event["type"] + if event_type == "INPUT": + status = plotter.on_input(event) + match status: + case DoraStatus.CONTINUE: + pass + case DoraStatus.STOP: + print("plotter returned stop status") + break + elif event_type == "STOP": + print("received stop") + else: + print("received unexpected event:", event_type) diff --git a/examples/python-dataflow/webcam.py b/examples/python-dataflow/webcam.py index d130b50c..fa85e2cc 100755 --- a/examples/python-dataflow/webcam.py +++ b/examples/python-dataflow/webcam.py @@ -22,31 +22,31 @@ start = time.time() while time.time() - start < 10: # Wait next dora_input event = node.next() - match event["type"]: - case "INPUT": - ret, frame = video_capture.read() - if not ret: - frame = np.zeros((CAMERA_HEIGHT, CAMERA_WIDTH, 3), dtype=np.uint8) - cv2.putText( - frame, - "No Webcam was found at index %d" % (CAMERA_INDEX), - (int(30), int(30)), - font, - 0.75, - (255, 255, 255), - 2, - 1, - ) - node.send_output( - "image", - cv2.imencode(".jpg", frame)[1].tobytes(), - event["metadata"], + event_type = event["type"] + if event_type == "INPUT": + ret, frame = video_capture.read() + if not ret: + frame = np.zeros((CAMERA_HEIGHT, CAMERA_WIDTH, 3), dtype=np.uint8) + cv2.putText( + frame, + "No Webcam was found at index %d" % (CAMERA_INDEX), + (int(30), int(30)), + font, + 0.75, + (255, 255, 255), + 2, + 1, ) + node.send_output( + "image", + cv2.imencode(".jpg", frame)[1].tobytes(), + event["metadata"], + ) + elif event_type == "STOP": + print("received stop") + break + else: + 
print("received unexpected event:", event_type) + break video_capture.release() diff --git a/examples/python-operator-dataflow/webcam.py b/examples/python-operator-dataflow/webcam.py index a0a86da3..f6dc13d2 100755 --- a/examples/python-operator-dataflow/webcam.py +++ b/examples/python-operator-dataflow/webcam.py @@ -33,35 +33,35 @@ class Operator: dora_event: str, send_output, ) -> DoraStatus: - match dora_event["type"]: - case "INPUT": - ret, frame = self.video_capture.read() - if ret: - frame = cv2.resize(frame, (CAMERA_WIDTH, CAMERA_HEIGHT)) + event_type = dora_event["type"] + if event_type == "INPUT": + ret, frame = self.video_capture.read() + if ret: + frame = cv2.resize(frame, (CAMERA_WIDTH, CAMERA_HEIGHT)) - ## Push an error image in case the camera is not available. - else: - frame = np.zeros((CAMERA_HEIGHT, CAMERA_WIDTH, 3), dtype=np.uint8) - cv2.putText( - frame, - "No Webcam was found at index %d" % (CAMERA_INDEX), - (int(30), int(30)), - font, - 0.75, - (255, 255, 255), - 2, - 1, - ) - - send_output( - "image", - pa.array(frame.ravel()), - dora_event["metadata"], + ## Push an error image in case the camera is not available. 
+ else: + frame = np.zeros((CAMERA_HEIGHT, CAMERA_WIDTH, 3), dtype=np.uint8) + cv2.putText( + frame, + "No Webcam was found at index %d" % (CAMERA_INDEX), + (int(30), int(30)), + font, + 0.75, + (255, 255, 255), + 2, + 1, ) - case "STOP": - print("received stop") - case other: - print("received unexpected event:", other) + + send_output( + "image", + pa.array(frame.ravel()), + dora_event["metadata"], + ) + elif event_type == "STOP": + print("received stop") + else: + print("received unexpected event:", event_type) if time.time() - self.start_time < 20: return DoraStatus.CONTINUE diff --git a/examples/python-ros2-dataflow/control_node.py b/examples/python-ros2-dataflow/control_node.py index 0437b871..7540c092 100755 --- a/examples/python-ros2-dataflow/control_node.py +++ b/examples/python-ros2-dataflow/control_node.py @@ -12,22 +12,22 @@ for i in range(500): if event is None: break if event["type"] == "INPUT": - match event["id"]: - case "turtle_pose": - print( - f"""Pose: {event["value"].tolist()}""".replace("\r", "").replace( - "\n", " " - ) + event_id = event["id"] + if event_id == "turtle_pose": + print( + f"""Pose: {event["value"].tolist()}""".replace("\r", "").replace( + "\n", " " ) - case "tick": - direction = { - "linear": { - "x": 1.0 + random.random(), - }, - "angular": {"z": (random.random() - 0.5) * 5}, - } + ) + elif event_id == "tick": + direction = { + "linear": { + "x": 1.0 + random.random(), + }, + "angular": {"z": (random.random() - 0.5) * 5}, + } - node.send_output( - "direction", - pa.array([direction]), - ) + node.send_output( + "direction", + pa.array([direction]), + ) diff --git a/examples/python-ros2-dataflow/random_turtle.py b/examples/python-ros2-dataflow/random_turtle.py index 24fa1c2a..d466ab1e 100755 --- a/examples/python-ros2-dataflow/random_turtle.py +++ b/examples/python-ros2-dataflow/random_turtle.py @@ -43,20 +43,20 @@ for i in range(500): event = dora_node.next() if event is None: break - match event["kind"]: - # Dora event - case 
"dora": - match event["type"]: - case "INPUT": - match event["id"]: - case "direction": - twist_writer.publish(event["value"]) + event_kind = event["kind"] + # Dora event + if event_kind == "dora": + event_type = event["type"] + if event_type == "INPUT": + event_id = event["id"] + if event_id == "direction": + twist_writer.publish(event["value"]) # ROS2 Event - case "external": - pose = event.inner()[0].as_py() - if i == CHECK_TICK: - assert ( - pose["x"] != 5.544444561004639 - ), "turtle should not be at initial x axis" - dora_node.send_output("turtle_pose", event.inner()) + elif event_kind == "external": + pose = event.inner()[0].as_py() + if i == CHECK_TICK: + assert ( + pose["x"] != 5.544444561004639 + ), "turtle should not be at initial x axis" + dora_node.send_output("turtle_pose", event.inner())