-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathautow.py
152 lines (132 loc) · 5.62 KB
/
autow.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
from .utils import vesc
import cv2
import cv2.aruco as aruco
import numpy as np
import depthai as dai
import signal
import numpy as np
import yaml
import math
import time
class Autow:
def __init__(self, target_aruco_id=0):
self.stopped = False
self.vesc = vesc.VESC("/dev/ttyACM0")
# Create pipeline
self.pipeline = dai.Pipeline()
self.cam = self.pipeline.create(dai.node.ColorCamera)
# self.cam.setColorOrder(dai.ColorCameraProperties.ColorOrder.RGB)
self.cam.setPreviewSize(640, 480)
self.cam.setInterleaved(False)
self.xout = self.pipeline.create(dai.node.XLinkOut)
self.xout.setStreamName("rgb")
self.cam.preview.link(self.xout.input)
self.target_id = target_aruco_id
self.hitch_ar = 0.065
self.hitch_cam = 0.146
self.steer_buff = []
self.init_time = time.perf_counter()
def run(self):
arucoDict = aruco.Dictionary_get(aruco.DICT_6X6_50)
arucoParams = aruco.DetectorParameters_create()
mtx, dist = yaml.safe_load(open('calibration.yaml'))['camera_matrix'], yaml.safe_load(
open('calibration.yaml'))['dist_coeff']
mtx, dist = np.array(mtx), np.array(dist)
# Connect to device and start pipeline
with dai.Device(self.pipeline) as device:
qRgb = device.getOutputQueue(name="rgb", maxSize=4, blocking=False)
print("UVC running")
while not self.stopped:
frame = qRgb.get()
if frame is None:
print(' No captured frame -- yikes!')
continue
frame = frame.getCvFrame()
# frame = cv2.flip(frame, -1)
# find center of ar tag
(corners, ids, rejected) = cv2.aruco.detectMarkers(frame, arucoDict,
parameters=arucoParams)
if len(corners) == 0:
self.vesc.set_throttle(0)
continue
ids = ids.flatten()
if self.target_id not in ids:
self.vesc.set_throttle(0)
continue
idx = np.where(ids == self.target_id)[0][0]
target_corners = corners[idx][0].reshape((4, 2))
rvecs, tvecs, _ = aruco.estimatePoseSingleMarkers(
corners, 0.05, mtx, dist)
theta = (rvecs[0, 0, 0]/math.pi*rvecs[0, 0, 2])
# ts = (rvecs[0, 0, 0],rvecs[0,0,1],rvecs[0, 0, 2])
# print(f"theta: {theta}, x: {-tvecs[0,0,0]}, z: {tvecs[0, 0, 2]}")
target_center = np.mean(target_corners, axis=0)
target_height = abs(target_corners[2, 1]-target_corners[0, 1])
# print(f"target @ {target_center} with height {target_height/frame.shape[0]}")
if target_height/frame.shape[0] >= 0.26:
print("target reached")
self.vesc.set_throttle(-0.05)
time.sleep(0.1)
print("hitching")
self.vesc.set_throttle(0)
time.sleep(1)
print("hitched")
break
# self.steer_buff.append(
# self.calc_angle(frame.shape[1], target_center))
self.steer_buff.append(self.calc_angle_hitch(
self.hitch_ar, self.hitch_cam, -tvecs[0,0,0], tvecs[0, 0, 2], theta))
if len(self.steer_buff) >= 5:
# print(f"time elapsed: {time.perf_counter()-self.init_time}")
self.init_time = time.perf_counter()
ave_steer = np.average(
self.steer_buff, weights=np.linspace(0, 1, len(self.steer_buff)))*1.15
print(f"ratio {target_height/frame.shape[0]}")
throttle = -(0.15 - abs(ave_steer-0.5)/15)*(1-target_height/frame.shape[0]*2)
print(f"steer: {ave_steer}, throttle: {throttle}")
self.vesc.run(ave_steer, throttle)
self.steer_buff = []
device.close()
self.vesc.close()
def get_area(self, x, y):
return 0.5*np.abs(np.dot(x, np.roll(y, 1))-np.dot(y, np.roll(x, 1)))
def calc_angle_hitch(self, h_ar, h_cam, x, z, theta):
"""
Calculate steering angle for given hitch offset and target position
Args:
h_ar (float): hitch offset of ar tag
h_cam (float): hitch offset of camera
x (float): target x position
z (float): target z position
theta (float): target angle
Returns:
float: steering angle
"""
z = z-h_cam-0.08
theta_a = np.arctan(x/z)
# print(f"theta_a: {theta_a}")
d = math.sqrt(x**2 + z**2)
theta_hd = np.arctan(z/x)-(math.pi/2-theta)
theta_o = np.arctan(h_ar*math.sin(theta_hd)/(d-h_ar*math.cos(theta_hd)))*x/abs(x)
theta_s = (theta_a-theta_o)*3
# print(theta_s)
return (theta_s+math.pi/2)/math.pi
# calc steering for given center point
def calc_angle(self, width, center_pt):
x, y = center_pt
print(x/width)
return 1-x/width
def e_stop(self):
self.vesc.run(0.5, 0)
self.vesc.close()
self.stopped = True
def stop(self, signal, frame):
print("gracefully stopping...")
self.stopped = True
def main():
    """Entry point: wire up signal handlers and run the hitching loop."""
    autow = Autow(target_aruco_id=13)
    # Both termination signals request a graceful stop of the control loop.
    for sig in (signal.SIGTERM, signal.SIGINT):
        signal.signal(sig, autow.stop)
    autow.run()


if __name__ == "__main__":
    main()