Browse Source

Add python-operator-dataflow example

tags/v0.2.0-candidate
Philipp Oppermann 3 years ago
parent
commit
a6b3bbdf3c
Failed to extract signature
12 changed files with 468 additions and 0 deletions
  1. +1
    -0
      examples/python-operator-dataflow/.gitignore
  2. +33
    -0
      examples/python-operator-dataflow/README.md
  3. +27
    -0
      examples/python-operator-dataflow/dataflow.yml
  4. +27
    -0
      examples/python-operator-dataflow/dataflow_without_webcam.yml
  5. +33
    -0
      examples/python-operator-dataflow/no_webcam.py
  6. +42
    -0
      examples/python-operator-dataflow/object_detection.py
  7. +86
    -0
      examples/python-operator-dataflow/plot.py
  8. +45
    -0
      examples/python-operator-dataflow/requirements.txt
  9. +46
    -0
      examples/python-operator-dataflow/run.rs
  10. +15
    -0
      examples/python-operator-dataflow/run.sh
  11. +82
    -0
      examples/python-operator-dataflow/utils.py
  12. +31
    -0
      examples/python-operator-dataflow/webcam.py

+ 1
- 0
examples/python-operator-dataflow/.gitignore View File

@@ -0,0 +1 @@
*.pt

+ 33
- 0
examples/python-operator-dataflow/README.md View File

@@ -0,0 +1,33 @@
# Python Dataflow Example

This example shows how to create and connect dora operators and custom nodes in Python.

## Overview

The [`dataflow.yml`](./dataflow.yml) defines a simple dataflow graph with the following three nodes:

- a webcam node, that connects to your webcam and feeds the dataflow with webcam frames as JPEG-compressed bytearrays.
- an object detection node, that applies YOLOv5 to the webcam image. The model is imported from PyTorch Hub. The output is the bounding box of each detected object, the confidence, and the class. You can find more info here: https://pytorch.org/hub/ultralytics_yolov5/
- a window plotting node, that retrieves the webcam image and the YOLOv5 bounding boxes and joins the two together.

## Getting started

```bash
cargo run --example python-operator-dataflow
```

## Installation

To install the dependencies, run the `run.sh` script.

```bash
run.sh
```

## Run the dataflow as a standalone

- Start the `dora-daemon`, passing the path to the dataflow file as argument:

```
../../target/release/dora-daemon --run-dataflow dataflow.yml
```

+ 27
- 0
examples/python-operator-dataflow/dataflow.yml View File

@@ -0,0 +1,27 @@
# Dataflow: webcam -> YOLOv5 object detection -> plotting window.
communication:
  zenoh:
    prefix: /example-python-dataflow

nodes:
  # Custom node that grabs webcam frames, driven by a 100 ms timer tick.
  - id: webcam
    custom:
      source: webcam.py
      inputs:
        tick: dora/timer/millis/100
      outputs:
        - image

  # Operator that runs YOLOv5 on each frame and emits bounding boxes.
  - id: object_detection
    operator:
      python: object_detection.py
      inputs:
        image: webcam/image
      outputs:
        - bbox

  # Operator that overlays the bounding boxes on the frame and displays it.
  - id: plot
    operator:
      python: plot.py
      inputs:
        image: webcam/image
        bbox: object_detection/bbox

+ 27
- 0
examples/python-operator-dataflow/dataflow_without_webcam.yml View File

@@ -0,0 +1,27 @@
# Same dataflow as dataflow.yml, but with a static-image source instead of
# a real webcam (useful on CI machines without a camera).
communication:
  zenoh:
    prefix: /example-python-no-webcam-dataflow

nodes:
  # Custom node that re-sends a downloaded sample image on every tick.
  - id: no_webcam
    custom:
      source: ./no_webcam.py
      inputs:
        tick: dora/timer/millis/100
      outputs:
        - image

  # Operator that runs YOLOv5 on each frame and emits bounding boxes.
  - id: object_detection
    operator:
      python: object_detection.py
      inputs:
        image: no_webcam/image
      outputs:
        - bbox

  # Operator that overlays the bounding boxes on the frame and displays it.
  - id: plot
    operator:
      python: plot.py
      inputs:
        image: no_webcam/image
        bbox: object_detection/bbox

+ 33
- 0
examples/python-operator-dataflow/no_webcam.py View File

@@ -0,0 +1,33 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import time
import urllib.request

import cv2
import numpy as np
from dora import Node

print("Hello from no_webcam.py")


req = urllib.request.urlopen("https://ultralytics.com/images/zidane.jpg")

arr = np.asarray(bytearray(req.read()), dtype=np.uint8)
node = Node()

start = time.time()

while time.time() - start < 20:
# Wait next dora_input
event = node.next()
match event["type"]:
case "INPUT":
print("received input", event["id"])
node.send_output("image", arr.tobytes())
case "STOP":
print("received stop")
case other:
print("received unexpected event:", other)

time.sleep(1)

+ 42
- 0
examples/python-operator-dataflow/object_detection.py View File

@@ -0,0 +1,42 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from enum import Enum
from typing import Callable

import cv2
import numpy as np
import torch


class DoraStatus(Enum):
    """Status an operator returns to the dora runtime after each input."""

    CONTINUE = 0  # keep the operator running and wait for the next input
    STOP = 1      # ask the runtime to tear the operator down


class Operator:
    """Run YOLOv5 object detection on incoming JPEG frames."""

    def __init__(self):
        # Load the small YOLOv5 model from PyTorch Hub (downloaded and
        # cached on first use).
        self.model = torch.hub.load("ultralytics/yolov5", "yolov5n")

    def on_input(
        self,
        dora_input: dict,
        send_output: Callable[[str, bytes], None],
    ) -> DoraStatus:
        """Decode one frame, run inference, and emit the bounding boxes.

        Args:
            dora_input (dict): Dict containing the "id", "data", and "metadata"
            send_output (Callable[[str, bytes]]): Function enabling sending output back to dora.
        """
        raw = np.frombuffer(dora_input["data"], dtype="uint8")
        bgr_frame = cv2.imdecode(raw, -1)
        # OpenCV decodes to BGR; the model expects RGB channel order.
        rgb_frame = bgr_frame[:, :, ::-1]
        detections = self.model(rgb_frame)  # forward pass, NMS included
        # xyxy[0]: one row per detection -> (min_x, min_y, max_x, max_y, conf, class)
        bbox_bytes = np.array(detections.xyxy[0].cpu()).tobytes()
        send_output("bbox", bbox_bytes, dora_input["metadata"])
        return DoraStatus.CONTINUE

+ 86
- 0
examples/python-operator-dataflow/plot.py View File

@@ -0,0 +1,86 @@
import os
from enum import Enum
from typing import Callable

import cv2
import numpy as np

from utils import LABELS

# Set to "true" on CI runners; used below to skip GUI calls where no
# display is available.
CI = os.environ.get("CI")

font = cv2.FONT_HERSHEY_SIMPLEX  # font used for the bounding-box labels


class DoraStatus(Enum):
    """Return code handed back to the dora runtime after each input."""

    # Keep processing further inputs.
    CONTINUE = 0
    # Request shutdown of this operator.
    STOP = 1


class Operator:
    """Display the webcam stream with YOLOv5 bounding boxes drawn on top."""

    def __init__(self):
        # Latest decoded frame and latest bounding boxes (empty until the
        # first messages arrive).
        self.image = []
        self.bboxs = []

    def on_input(
        self,
        dora_input: dict,
        send_output: Callable[[str, bytes], None],
    ) -> DoraStatus:
        """
        Put image and bounding box on cv2 window.

        Args:
            dora_input["id"] (str): Id of the dora_input declared in the yaml configuration
            dora_input["data"] (bytes): Bytes message of the dora_input
            send_output (Callable[[str, bytes]]): Function enabling sending output back to dora.
        """
        input_id = dora_input["id"]
        if input_id == "image":
            # Decode the JPEG bytes and remember the frame.
            buffer = np.frombuffer(dora_input["data"], dtype="uint8")
            self.image = cv2.imdecode(buffer, -1)

        elif input_id == "bbox" and len(self.image) != 0:
            # Boxes arrive as a flat float32 array, six values per box:
            # (min_x, min_y, max_x, max_y, confidence, label).
            flat = np.frombuffer(dora_input["data"], dtype="float32")
            self.bboxs = np.reshape(flat, (-1, 6))
            for min_x, min_y, max_x, max_y, confidence, label in self.bboxs:
                top_left = (int(min_x), int(min_y))
                bottom_right = (int(max_x), int(max_y))
                cv2.rectangle(
                    self.image,
                    top_left,
                    bottom_right,
                    (0, 255, 0),
                    2,
                )

                cv2.putText(
                    self.image,
                    LABELS[int(label)] + f", {confidence:0.2f}",
                    bottom_right,
                    font,
                    0.75,
                    (0, 255, 0),
                    2,
                    1,
                )

        # Skip GUI work on CI machines, where no display exists.
        if CI != "true":
            cv2.imshow("frame", self.image)
            # Pressing "q" in the window shuts the operator down.
            if cv2.waitKey(1) & 0xFF == ord("q"):
                return DoraStatus.STOP

        return DoraStatus.CONTINUE

    def __del__(self):
        # Close the cv2 window when the operator is dropped.
        cv2.destroyAllWindows()

+ 45
- 0
examples/python-operator-dataflow/requirements.txt View File

@@ -0,0 +1,45 @@
# YOLOv5 requirements
# Usage: pip install -r requirements.txt

# Base ----------------------------------------
matplotlib>=3.2.2
numpy>=1.18.5
opencv-python>=4.1.1
Pillow>=7.1.2
PyYAML>=5.3.1
requests>=2.23.0
scipy>=1.4.1
torch>=1.7.0
torchvision>=0.8.1
tqdm>=4.64.0
protobuf<=3.20.1 # https://github.com/ultralytics/yolov5/issues/8012

# Logging -------------------------------------
tensorboard>=2.4.1
# wandb
# clearml

# Plotting ------------------------------------
pandas>=1.1.4
seaborn>=0.11.0

# Export --------------------------------------
# coremltools>=5.2 # CoreML export
# onnx>=1.9.0 # ONNX export
# onnx-simplifier>=0.4.1 # ONNX simplifier
# nvidia-pyindex # TensorRT export
# nvidia-tensorrt # TensorRT export
# scikit-learn==0.19.2 # CoreML quantization
# tensorflow>=2.4.1 # TFLite export (or tensorflow-cpu, tensorflow-aarch64)
# tensorflowjs>=3.9.0 # TF.js export
# openvino-dev # OpenVINO export

# Extras --------------------------------------
ipython # interactive notebook
psutil # system utilization
thop>=0.1.1 # FLOPs computation
# albumentations>=1.0.3
# pycocotools>=2.0 # COCO mAP
# roboflow

# note: opencv-python is already pinned in the Base section above

+ 46
- 0
examples/python-operator-dataflow/run.rs View File

@@ -0,0 +1,46 @@
use eyre::{bail, Context};
use std::{env, path::Path};

#[tokio::main]
async fn main() -> eyre::Result<()> {
    // Install the tracing subscriber first so later failures are logged.
    set_up_tracing().wrap_err("failed to set up tracing subscriber")?;

    // `file!()` is relative to the workspace root, so joining it onto the
    // manifest dir and taking the parent yields this example's directory.
    let root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let example_dir = root.join(file!());
    std::env::set_current_dir(example_dir.parent().unwrap())
        .wrap_err("failed to set working dir")?;

    build_package("dora-daemon").await?;
    run(root).await?;

    Ok(())
}

/// Compile the given cargo package using the same `cargo` binary that
/// invoked this example (the `CARGO` env var is set by cargo itself).
async fn build_package(package: &str) -> eyre::Result<()> {
    let cargo = std::env::var("CARGO").unwrap();
    let mut build = tokio::process::Command::new(&cargo);
    build.arg("build").arg("--package").arg(package);
    if !build.status().await?.success() {
        bail!("failed to build {package}");
    }
    Ok(())
}

/// Execute the example's `run.sh` helper script through `sh`.
async fn run(_root: &Path) -> eyre::Result<()> {
    let mut script = tokio::process::Command::new("sh");
    script.arg("./run.sh");
    if !script.status().await?.success() {
        bail!("failed to run python example.");
    }
    Ok(())
}

/// Route tracing events to a pretty-printed stdout logger and install it
/// as the global subscriber.
fn set_up_tracing() -> eyre::Result<()> {
    use tracing_subscriber::prelude::*;

    let stdout_log = tracing_subscriber::fmt::layer().pretty();
    let subscriber = tracing_subscriber::Registry::default().with(stdout_log);
    tracing::subscriber::set_global_default(subscriber)
        .context("failed to set tracing global subscriber")
}

+ 15
- 0
examples/python-operator-dataflow/run.sh View File

@@ -0,0 +1,15 @@
#!/usr/bin/env sh
# Set up a virtualenv with the dora Python API and this example's
# dependencies, then launch the dataflow through the dora daemon.
set -e

# Create and activate a fresh virtual environment.
python3 -m venv .env
. $(pwd)/.env/bin/activate
# Dev dependencies: build and install the dora Python node API from source.
pip install maturin
cd ../../apis/python/node
maturin develop
cd ../../../examples/python-operator-dataflow

# Dependencies of the example itself.
pip install --upgrade pip
pip install -r requirements.txt

# Use the no-webcam variant so the example also works on headless machines.
cargo run -p dora-daemon -- --run-dataflow dataflow_without_webcam.yml

+ 82
- 0
examples/python-operator-dataflow/utils.py View File

@@ -0,0 +1,82 @@
# COCO class labels in YOLOv5 class-index order: LABELS[i] is the name of
# class id i as returned by the model. Class 0 is "person" (the previous
# placeholder "ABC" mislabeled every person detection).
LABELS = [
    "person",
    "bicycle",
    "car",
    "motorcycle",
    "airplane",
    "bus",
    "train",
    "truck",
    "boat",
    "traffic light",
    "fire hydrant",
    "stop sign",
    "parking meter",
    "bench",
    "bird",
    "cat",
    "dog",
    "horse",
    "sheep",
    "cow",
    "elephant",
    "bear",
    "zebra",
    "giraffe",
    "backpack",
    "umbrella",
    "handbag",
    "tie",
    "suitcase",
    "frisbee",
    "skis",
    "snowboard",
    "sports ball",
    "kite",
    "baseball bat",
    "baseball glove",
    "skateboard",
    "surfboard",
    "tennis racket",
    "bottle",
    "wine glass",
    "cup",
    "fork",
    "knife",
    "spoon",
    "bowl",
    "banana",
    "apple",
    "sandwich",
    "orange",
    "broccoli",
    "carrot",
    "hot dog",
    "pizza",
    "donut",
    "cake",
    "chair",
    "couch",
    "potted plant",
    "bed",
    "dining table",
    "toilet",
    "tv",
    "laptop",
    "mouse",
    "remote",
    "keyboard",
    "cell phone",
    "microwave",
    "oven",
    "toaster",
    "sink",
    "refrigerator",
    "book",
    "clock",
    "vase",
    "scissors",
    "teddy bear",
    "hair drier",
    "toothbrush",
]

+ 31
- 0
examples/python-operator-dataflow/webcam.py View File

@@ -0,0 +1,31 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import time

import cv2
from dora import Node

node = Node()

video_capture = cv2.VideoCapture(0)

start = time.time()

# Run for 20 seconds
while time.time() - start < 10:
# Wait next dora_input
event = node.next()
match event["type"]:
case "INPUT":
ret, frame = video_capture.read()
if ret:
node.send_output("image", cv2.imencode(".jpg", frame)[1].tobytes())
case "STOP":
print("received stop")
break
case other:
print("received unexpected event:", other)
break

video_capture.release()

Loading…
Cancel
Save