06-blob-detection-traffic.py
import sensor, image, time, os
from utime import sleep_ms
sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.QVGA)
clock = time.clock()
img_reader = image.ImageIO("/Traffic-Short.bin", "r")
original_image = sensor.alloc_extra_fb(320, 240, sensor.GRAYSCALE)
background_image = sensor.alloc_extra_fb(320, 240, sensor.GRAYSCALE)
thresholds = (6, 255) # Define the grayscale range we're looking for
def print_blob_info(blob):
    vertical_position = blob.cy()
    if 140 <= vertical_position < 160:
        print("Direction: left")
    if 160 <= vertical_position < 190:
        print("Direction: right")
    print("Density: ", blob.density())
    print("Pixels: ", blob.pixels())
    print("Area: ", blob.area())
    print("Width: ", blob.w())
    print("Elongation: ", blob.elongation())
    print("-----------------")
# Use the first frame as background
for i in range(0, 10): # Or skip a few frames
    background_image.replace(img_reader.read(copy_to_fb=True, loop=True))
while True:
    clock.tick() # Track elapsed milliseconds between snapshots().
    target_image = img_reader.read(copy_to_fb=True, loop=True)
    # Make a copy of the image
    original_image.replace(target_image)
    # Replace the image with the "abs(NEW-OLD)" frame difference.
    target_image.difference(background_image)
    # Find blobs
    blobs = target_image.find_blobs([thresholds], pixels_threshold=250, area_threshold=600, roi=(0,90,320,110), merge=True)
    # Comment out this line to see the diff image.
    target_image.replace(original_image)
    # Draw blobs
    for blob in blobs:
        target_image.draw_rectangle(blob.rect(), color=255)
        target_image.draw_cross(blob.cx(), blob.cy(), color=255)
        elongation = blob.elongation()
        if blob.area() > 5000 and elongation > 0.85:
            if 120 < blob.w() < 135:
                print("Bus detected!")
            elif elongation > 0.9:
                print("Tram detected!")
            else:
                continue
            print_blob_info(blob)
    # Set a value to create the desired frame rate
    sleep_ms(20)
    #print(clock.fps())
# Further considerations:
# - Track movement of blobs (see the sketch below)
# - Use image.get_histogram to detect patterns
# - Merge blobs only if certain parameters are met
# - Use more nuanced thresholds
# - Calculate more complex directions using major_axis_line()
# - Detect congestion / rush hour
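
# ---------------------------------------------------------------------------
# Sketch: tracking blob movement between frames (the first consideration above).
# This is not part of the original script; it is one possible approach that
# only relies on the blob.cx()/blob.cy() values already used above. The helper
# name match_blobs_to_previous and the 40-pixel matching radius are illustrative
# assumptions, not values taken from the tutorial.

def match_blobs_to_previous(blobs, prev_centroids, max_dist=40):
    """Match each blob to the nearest centroid from the previous frame.

    Returns a list of (blob, dx, dy) tuples; dx > 0 means the blob moved right.
    Blobs with no previous centroid within max_dist are treated as new objects.
    """
    matches = []
    for blob in blobs:
        cx, cy = blob.cx(), blob.cy()
        best, best_dist = None, max_dist
        for (px, py) in prev_centroids:
            dist = ((cx - px) ** 2 + (cy - py) ** 2) ** 0.5
            if dist < best_dist:
                best, best_dist = (px, py), dist
        if best is not None:
            matches.append((blob, cx - best[0], cy - best[1]))
    return matches

# Inside the main loop one could keep prev_centroids between iterations:
#
#     for blob, dx, dy in match_blobs_to_previous(blobs, prev_centroids):
#         print("Moving", "right" if dx > 0 else "left", "by", dx, "px/frame")
#     prev_centroids = [(b.cx(), b.cy()) for b in blobs]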