
added optical tracking sensor

subDesTagesMitExtraKaese 3 years ago
parent
commit
59dea609d5

+ 13 - 0
raspberry-pi/install.sh

@@ -0,0 +1,13 @@
+#!/bin/bash
+
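+# update the system, then install Python 3 and the libraries used by the marker tracking code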
+sudo apt update                               || exit 1
+sudo apt upgrade -y                           || exit 1
+
+sudo apt install python3 python3-pip -y       || exit 1
+
+python3 -m pip install --upgrade pip          || exit 1
+python3 -m pip install pyserial               || exit 1
+python3 -m pip install opencv-contrib-python  || exit 1
+python3 -m pip install pillow                 || exit 1
+python3 -m pip install noise                  || exit 1
+sudo apt install python3-tk -y                || exit 1

+ 78 - 0
raspberry-pi/markerDetection.py

@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+
+import numpy as np
+import cv2
+import cv2.aruco as aruco
+
+aruco_dict = aruco.Dictionary_get(aruco.DICT_4X4_50)
+
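+# saveMarkers: write a 400x400 white sheet with 4x4 ArUco markers 0-3 in its corners to markers.png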
+def saveMarkers():
+  image = np.zeros((400,400), np.uint8)
+  image[:,:] = (255)
+
+  image[ 50: 50 + 100, 50: 50 + 100] = aruco_dict.drawMarker(0, 100)
+  image[-50 - 100:-50, 50: 50 + 100] = aruco_dict.drawMarker(1, 100)
+  image[ 50: 50 + 100,-50 - 100:-50] = aruco_dict.drawMarker(2, 100)
+  image[-50 - 100:-50,-50 - 100:-50] = aruco_dict.drawMarker(3, 100)
+
+  cv2.imwrite("markers.png", image)
+
+parameters = aruco.DetectorParameters_create()
+
+def find_marker(image):
+  # convert the frame to grayscale and run ArUco detection
+  gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
+  corners, ids, rejectedImgPoints = aruco.detectMarkers(gray, aruco_dict, parameters=parameters)
+  
+  markers = [None] * 4
+
+  if not corners:
+    return markers
+
+  for corner, id in zip(corners, ids.flatten()):
+    # draw the bounding box of the ArUCo detection
+    c = corner[0]
+    pts = [(int(x), int(y)) for x, y in c]  # cv2.line expects integer pixel coordinates
+    cv2.line(image, pts[0], pts[1], (0, 255, 0), 2)
+    cv2.line(image, pts[1], pts[2], (0, 255, 0), 2)
+    cv2.line(image, pts[2], pts[3], (0, 255, 0), 2)
+    cv2.line(image, pts[3], pts[0], (0, 255, 0), 2)
+    cv2.putText(image, str(id), (int(c[0][0]), int(c[0][1]) - 15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
+
+    cX = sum(c[:,0]) / 4
+    cY = sum(c[:,1]) / 4
+    cv2.circle(image, (int(cX), int(cY)), 4, (0, 0, 255), -1)
+
+    if id < 4:
+      markers[id] = (cX, cY)
+
+  return markers
+
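+# measureDistances: locate markers 0 and 1 and return (horizontal x-distance, euclidean distance) in pixels, or None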
+def measureDistances(image):
+  markers = find_marker(image)
+
+  if markers[0] and markers[1]:
+    cv2.line(image, (int(markers[0][0]),int(markers[0][1])), (int(markers[1][0]),int(markers[1][1])), (255, 0, 0), 2)
+
+    length = np.sqrt((markers[0][0]-markers[1][0])**2 + (markers[0][1]-markers[1][1])**2)
+    middle = abs(markers[0][0]-markers[1][0])
+
+    return middle, length
+
+  else:
+    return None
+
+if __name__ == "__main__":
+  cap = cv2.VideoCapture(0) 
+  while True:
+    success, image = cap.read()
+    if not success:
+      continue
+    markers = find_marker(image)
+
+    if markers[0] and markers[1]:
+      cv2.line(image, (int(markers[0][0]),int(markers[0][1])), (int(markers[1][0]),int(markers[1][1])), (255, 0, 0), 2)
+      length = np.sqrt((markers[0][0]-markers[1][0])**2 + (markers[0][1]-markers[1][1])**2)
+
+      cv2.putText(image, "%.2fpx" % (length), (int(markers[0][0]), int(markers[0][1])), cv2.FONT_HERSHEY_SIMPLEX,
+        1.0, (255, 255, 0), 3)
+    cv2.imshow("image", image)
+    if cv2.waitKey(1) & 0xFF == ord('q'):
+      break
+
+  cap.release()
+  cv2.destroyAllWindows()

BIN
raspberry-pi/markers.png


+ 31 - 2
raspberry-pi/sensors.py

@@ -1,7 +1,9 @@
 from connection import ArduinoSlave
+import markerDetection
 import time
 import statistics
 import math
+import threading, cv2
 
 conn = ArduinoSlave()
 
@@ -39,7 +41,7 @@ class AcusticSensor:
     print("sonic speed right:",self.sonic_speed_right)
 
   def read(self):
-    return (952, 660)
+    return conn.getAcusticRTTs()
 
   def calculate_position(self):
         if not self.sonic_speed_right or not self.sonic_speed_left:
@@ -70,12 +72,39 @@ class MagneticSensor:
     pass
 
   def read(self):
-    return (0, 0)
+    return conn.getMagneticField()
+
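+# reads camera frames on a background thread and stores the latest marker distance measurement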
+class OpticalSensor():
+  def __init__(self):
+    self.cap = cv2.VideoCapture(0)
+    self._t  = None
+    self.values = None
+
+  def start(self):
+    if not self._t:
+      self._t = threading.Thread(target=self._getFrames, args=())
+      self._t.daemon = True  # thread dies when main thread (only non-daemon thread) exits.
+      self._t.start()
+
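+  # background loop: keep reading camera frames and cache the newest measurement in self.values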
+  def _getFrames(self):
+    while True:
+      success, image = self.cap.read()
+      if success:
+        self.values = markerDetection.measureDistances(image)
+        print("opt:", self.values)
+
+  def calibrate(self, x, y):
+    pass
+
+  def read(self):
+    return self.values
 
 if __name__ == "__main__":
   acc = AcusticSensor()
   acc.start()
   mag = MagneticSensor()
   mag.start()
+  opt = OpticalSensor()
+  opt.start()
   while True:
     time.sleep(1)