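"""Raspimon face-reaction demo.

Runs real-time face detection with the vision API and makes the Chirp
raspimon "dance" on the Sense HAT LED matrix whenever a face is visible.
"""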
from sense_hat import SenseHat
from time import sleep
import threading
# Our new APIs:
import vision
from bestiary import Chirp
# initialize the Sense HAT and draw the raspimon in its standing pose
sense = SenseHat()
sense.set_pixels(Chirp.STANDING)
# animate the raspimon
def dance():
    sense.set_pixels(Chirp.WINGS_UP)
    sleep(0.3)
    sense.set_pixels(Chirp.STANDING)
    sleep(0.3)
# redraw the raspimon in response to detected faces
def react_to_faces(faces):
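    # Run the dance on a separate thread so the detection loop isn't blocked,
    # and only start it if no dance is already in progress
    # (active_count() == 1 means only the main thread is alive).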
    print(len(faces), 'faces visible')
    if len(faces) > 0 and threading.active_count() == 1:
        thread = threading.Thread(target=dance)
        thread.start()
# load the neural network model (the vision API hides the TensorFlow and Edge TPU details)
detector = vision.Detector(vision.FACE_DETECTION_MODEL)
# run the model on camera frames in real time
for frame in vision.get_frames():
    faces = detector.get_objects(frame)
    # Draw bounding boxes on the frame and display it
    vision.draw_objects(frame, faces)
    # Pass the faces to the function that controls the raspimon
    react_to_faces(faces)
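# The loop above keeps running the detector on each camera frame until the
# program is stopped (for example with Ctrl+C).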