Browse Source

fix pip get path + fix CI for dataflow.yml

arrow_union
Hennzau haixuanTao 1 year ago
parent
commit
fed4f1f659
7 changed files with 25 additions and 146 deletions
  1. +4
    -10
      examples/python-dataflow/dataflow.yml
  2. +2
    -10
      examples/python-dataflow/dataflow_dynamic.yml
  3. +0
    -6
      examples/python-dataflow/dataflow_yolo.yml
  4. +8
    -4
      examples/python-dataflow/run.rs
  5. +10
    -25
      node-hub/opencv-plot/main.py
  6. +0
    -4
      node-hub/opencv-video-capture/main.py
  7. +1
    -87
      node-hub/ultralytics-yolo/main.py

+ 4
- 10
examples/python-dataflow/dataflow.yml View File

@@ -4,10 +4,9 @@ nodes:
path: opencv-video-capture
inputs:
tick: plot/tick

outputs:
- image
- text
env:
CAPTURE_PATH: 0
IMAGE_WIDTH: 640
@@ -17,13 +16,8 @@ nodes:
build: pip install ../../node-hub/opencv-plot
path: opencv-plot
inputs:
image:
source: camera/image
queue_size: 1

tick:
source: dora/timer/millis/16 # this node displays a window, so it's better to deflect the timer so that, when the window is closed, ticks are no longer sent into the graph
queue_size: 1

image: camera/image
tick: dora/timer/millis/16 # this node displays a window, so it's better to deflect the timer so that, when the window is closed, ticks are no longer sent into the graph
text: camera/text
outputs:
- tick

+ 2
- 10
examples/python-dataflow/dataflow_dynamic.yml View File

@@ -4,10 +4,8 @@ nodes:
path: opencv-video-capture
inputs:
tick: plot/tick

outputs:
- image

env:
CAPTURE_PATH: 0
IMAGE_WIDTH: 640
@@ -17,13 +15,7 @@ nodes:
build: pip install ../../node-hub/opencv-plot
path: dynamic
inputs:
image:
source: camera/image
queue_size: 1

tick:
source: dora/timer/millis/16 # this node displays a window, so it's better to deflect the timer so that, when the window is closed, ticks are no longer sent into the graph
queue_size: 1

image: camera/image
tick: dora/timer/millis/16 # this node displays a window, so it's better to deflect the timer so that, when the window is closed, ticks are no longer sent into the graph
outputs:
- tick

+ 0
- 6
examples/python-dataflow/dataflow_yolo.yml View File

@@ -4,10 +4,8 @@ nodes:
path: opencv-video-capture
inputs:
tick: plot/tick

outputs:
- image

env:
CAPTURE_PATH: 0
IMAGE_WIDTH: 640
@@ -20,7 +18,6 @@ nodes:
image:
source: camera/image
queue_size: 1

outputs:
- bbox
env:
@@ -33,12 +30,9 @@ nodes:
image:
source: camera/image
queue_size: 1

bbox: object-detection/bbox

tick:
source: dora/timer/millis/16 # this node displays a window, so it's better to deflect the timer so that, when the window is closed, ticks are no longer sent into the graph
queue_size: 1

outputs:
- tick

+ 8
- 4
examples/python-dataflow/run.rs View File

@@ -1,4 +1,4 @@
use dora_core::{get_python_path, run};
use dora_core::{get_pip_path, get_python_path, run};
use dora_tracing::set_up_tracing;
use eyre::{bail, ContextCompat, WrapErr};
use std::path::Path;
@@ -50,9 +50,13 @@ async fn main() -> eyre::Result<()> {
);
}

run("pip", &["install", "maturin"], Some(venv))
.await
.context("pip install maturin failed")?;
run(
get_pip_path().context("Could not get pip binary")?,
&["install", "maturin"],
Some(venv),
)
.await
.context("pip install maturin failed")?;

run(
"maturin",


+ 10
- 25
node-hub/opencv-plot/main.py View File

@@ -17,13 +17,7 @@ class Plot:
"names": np.array([]),
}

text: {} = {
"text": "",
"font_scale": np.float32(0.0),
"color": (np.uint8(0), np.uint8(0), np.uint8(0)),
"thickness": np.uint32(0),
"position": (np.uint32(0), np.uint32(0)),
}
text: str = ""

width: np.uint32 = None
height: np.uint32 = None
@@ -57,12 +51,12 @@ def plot_frame(plot, ci_enabled):

cv2.putText(
plot.frame,
plot.text["text"],
(int(plot.text["position"][0]), int(plot.text["position"][1])),
plot.text,
(20, 20),
cv2.FONT_HERSHEY_SIMPLEX,
float(plot.text["font_scale"]),
(int(plot.text["color"][0]), int(plot.text["color"][1]), int(plot.text["color"][2])),
int(plot.text["thickness"]),
0.5,
(255, 255, 255),
1,
1,
)

@@ -122,6 +116,9 @@ def main():
event_id = event["id"]

if event_id == "tick":
if ci_enabled:
break

node.send_output(
"tick",
pa.array([]),
@@ -154,23 +151,11 @@ def main():
break

elif event_id == "text":
arrow_text = event["value"][0]
plot.text = {
"text": arrow_text["text"].as_py(),
"font_scale": np.float32(arrow_text["font_scale"].as_py()),
"color": (np.uint8(arrow_text["color"].as_py()[0]),
np.uint8(arrow_text["color"].as_py()[1]),
np.uint8(arrow_text["color"].as_py()[2])),
"thickness": np.uint32(arrow_text["thickness"].as_py()),
"position": (np.uint32(arrow_text["position"].as_py()[0]),
np.uint32(arrow_text["position"].as_py()[1]))
}
plot.text = event["value"][0].as_py()

if plot_frame(plot, ci_enabled):
break

elif event_type == "STOP":
break
elif event_type == "ERROR":
raise Exception(event["error"])



+ 0
- 4
node-hub/opencv-video-capture/main.py View File

@@ -29,8 +29,6 @@ def main():
if isinstance(video_capture_path, str) and video_capture_path.isnumeric():
video_capture_path = int(video_capture_path)

print(type(video_capture_path))

image_width = os.getenv("IMAGE_WIDTH", args.image_width)
image_height = os.getenv("IMAGE_HEIGHT", args.image_height)

@@ -86,8 +84,6 @@ def main():
event["metadata"]
)

elif event_type == "STOP":
break
elif event_type == "ERROR":
raise Exception(event["error"])



+ 1
- 87
node-hub/ultralytics-yolo/main.py View File

@@ -7,90 +7,6 @@ import pyarrow as pa
from dora import Node
from ultralytics import YOLO

LABELS = [
"ABC",
"bicycle",
"car",
"motorcycle",
"airplane",
"bus",
"train",
"truck",
"boat",
"traffic light",
"fire hydrant",
"stop sign",
"parking meter",
"bench",
"bird",
"cat",
"dog",
"horse",
"sheep",
"cow",
"elephant",
"bear",
"zebra",
"giraffe",
"backpack",
"umbrella",
"handbag",
"tie",
"suitcase",
"frisbee",
"skis",
"snowboard",
"sports ball",
"kite",
"baseball bat",
"baseball glove",
"skateboard",
"surfboard",
"tennis racket",
"bottle",
"wine glass",
"cup",
"fork",
"knife",
"spoon",
"bowl",
"banana",
"apple",
"sandwich",
"orange",
"broccoli",
"carrot",
"hot dog",
"pizza",
"donut",
"cake",
"chair",
"couch",
"potted plant",
"bed",
"dining table",
"toilet",
"tv",
"laptop",
"mouse",
"remote",
"keyboard",
"cell phone",
"microwave",
"oven",
"toaster",
"sink",
"refrigerator",
"book",
"clock",
"vase",
"scissors",
"teddy bear",
"hair drier",
"toothbrush",
]


def main():
# Handle dynamic nodes, ask for the name of the node in the dataflow, and the same values as the ENV variables.
parser = argparse.ArgumentParser(
@@ -134,7 +50,7 @@ def main():
conf = np.array(results[0].boxes.conf.cpu())
labels = np.array(results[0].boxes.cls.cpu())

names = [LABELS[int(label)] for label in labels]
names = [model.names.get(label) for label in labels]

bbox = {
"bbox": bboxes.ravel(),
@@ -148,8 +64,6 @@ def main():
event["metadata"],
)

elif event_type == "STOP":
break
elif event_type == "ERROR":
raise Exception(event["error"])



Loading…
Cancel
Save