Commit 1ea43e3f authored by Andreas Schmidt's avatar Andreas Schmidt

Merge from arosa.

parent b2ee2ec7
.vagrant/
rna.img.xz
__pycache__/
*.pyc
......@@ -7,3 +7,9 @@
[submodule "salt/prrt/files/PRRT"]
path = salt/prrt/files/PRRT
url = git@git.nt.uni-saarland.de:LARN/PRRT.git
[submodule "salt/car/files/Adafruit-Motor-HAT-Python-Library"]
path = salt/car/files/Adafruit-Motor-HAT-Python-Library
url = https://github.com/adafruit/Adafruit-Motor-HAT-Python-Library.git
[submodule "salt/video/files/gst-prrt"]
path = salt/video/files/gst-prrt
url = git@git.nt.uni-saarland.de:LARN/gst-prrt.git
......@@ -33,6 +33,9 @@ Vagrant.configure("2") do |config|
# within the machine from a port on the host machine. In the example below,
# accessing "localhost:8080" will access port 80 on the guest machine.
config.vm.network "forwarded_port", guest: 4505, host: 4505
config.vm.network "forwarded_port", guest: 4506, host: 4506
# Create a private network, which allows host-only access to the machine
# using a specific IP.
# config.vm.network "private_network", ip: "192.168.33.10"
......
......@@ -13,3 +13,5 @@ base:
- sender
'rna-06':
- sender
'rna-07':
- sender
include:
- prrt
- prrt.dependencies
- apps
Subproject commit 427692639d9e0d0f0b33bcc45545de9c72083f23
Subproject commit bb3aaed240bcac33643fa36c750f8c9bf215d508
......@@ -17,3 +17,10 @@ demo_script:
- user: rna
- group: rna
- file_mode: keep
prrt_build_ext:
cmd.run:
- name: python setup.py install && python3 setup.py install
- cwd: /opt/rna/apps/PRRT
- require:
- pip: pip_modules
Subproject commit f9e7c2592c5c720207b4e5040eb28e1628b48f49
from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor
import time
import atexit
# create a default object, no changes to I2C address or frequency
mh = Adafruit_MotorHAT(addr=0x60)
# recommended for auto-disabling motors on shutdown!
def turnOffMotors():
    """Release every motor channel so the car stops when the process exits."""
    for channel in (1, 2, 3, 4):
        mh.getMotor(channel).run(Adafruit_MotorHAT.RELEASE)
atexit.register(turnOffMotors)
def _speedToDirection(speed):
    """Map a signed speed to a Motor HAT direction constant (sign only)."""
    if speed == 0:
        return Adafruit_MotorHAT.RELEASE
    return Adafruit_MotorHAT.FORWARD if speed > 0 else Adafruit_MotorHAT.BACKWARD
def setSpeed(left, right):
    """Drive the left side (motors 1, 2) and right side (motors 3, 4).

    Sign selects the direction, magnitude the PWM speed.  The right side
    gets a 10% boost to compensate drivetrain asymmetry; the boosted value
    is clamped to +/-255 because int(255 * 1.1) == 280 would exceed the
    0..255 range the Adafruit driver's setSpeed accepts.
    """
    right = int(right * 1.1)
    right = max(-255, min(255, right))  # keep abs(right) within the driver limit
    mh.getMotor(1).run(_speedToDirection(left))
    mh.getMotor(2).run(_speedToDirection(left))
    mh.getMotor(3).run(_speedToDirection(right))
    mh.getMotor(4).run(_speedToDirection(right))
    mh.getMotor(1).setSpeed(abs(left))
    mh.getMotor(2).setSpeed(abs(left))
    mh.getMotor(3).setSpeed(abs(right))
    mh.getMotor(4).setSpeed(abs(right))
if __name__ == "__main__":
    # Manual smoke test: arc right, stop, arc left — repeat until killed.
    X = 255  # speed of the fast side
    Y = 4    # divisor for the slow side
    while True:
        print("right")
        # Integer division: the Adafruit driver expects integer speeds;
        # the original X / Y handed setSpeed a float (the sibling copy of
        # this script already uses //).
        setSpeed(X // Y, X)
        time.sleep(1)
        print("stop")
        setSpeed(0, 0)
        time.sleep(1)
        print("left")
        setSpeed(-X // Y, -X)
        time.sleep(1)
import prrt
import pickle
from MotorControl import *
# Receiver side: listen on PRRT port 7000 for pickled (left, right)
# wheel-speed pairs and apply them to the motors.
# NOTE(review): the name `socket` shadows the stdlib module of the same
# name; harmless here but worth renaming.
socket = prrt.PrrtSocket(7000, isSender=False)
while True:
    u = socket.receive_asap_wait()
    # SECURITY NOTE(review): pickle.loads on data received from the network
    # executes arbitrary code if the peer is untrusted.
    u_v = pickle.loads(u)
    left = u_v[0]
    right = u_v[1]
    setSpeed(int(left), int(right))
import gi
gi.require_version("Gst", "1.0")
from gi.repository import Gst, GObject
import cv2
import numpy as np
import logging
import prrt
import pickle
from controller.lineDetect import *
class Controller:
    """Sensor/controller node: receives the PRRT video stream, estimates
    the line angle of each frame and sends wheel-speed commands to the
    actor (the car) over a second PRRT socket."""
    def __init__(self, port=5000, actor=("10.8.0.106", 8000)):
        # port: local PRRT port the video arrives on (handed to prrtsrc).
        # actor: (host, port) of the car's actuator PRRT socket.
        self.mainloop = GObject.MainLoop()
        self.pipeline = None
        # NOTE(review): `main_loop` appears unused; run() drives `mainloop`
        # above — confirm before removing.
        self.main_loop = None
        self.port = port
        # Sender socket toward the actor with a 50 ms target delay.
        self.actor_socket = prrt.PrrtSocket(actor[1], True, target_delay=0.05)
        # NOTE(review): actor[1] is used as both the local socket port and
        # the remote port — verify that is intended.
        self.actor_socket.connect(actor[0], actor[1])
        Gst.init(None)
    @staticmethod
    def control(mean_angle):
        """P-controller: map the measured line angle (degrees) to a pair of
        wheel speeds [left-ish, right-ish], each clamped to [0, 255]."""
        v_d = 100  # desired base speed of both wheels
        t_a = 1  # sampling period assumed by the gain below
        p = .1  # gain, stability ensured with t_a as above
        L = 12  # distance between left and right wheel (in cm)
        # use a simple P controller approach for given sampling rate
        u = np.empty(2)
        u[0] = int(v_d - p * t_a * mean_angle * L / 2)
        u[0] = min(255, max(0, u[0]))
        # The second wheel mirrors the first around v_d so the mean speed
        # stays constant while steering.
        u[1] = int(2 * v_d - u[0])
        u[1] = min(255, max(0, u[1]))
        return u
    @staticmethod
    def angle_from_image(img):
        """Estimate the mean line angle (degrees) of one decoded frame as a
        circular mean over the per-slice offsets from detect_lines."""
        detected = detect_lines(img, 8)
        # detected contains horizontal offsets (in pixels) from the x position of the red anchor,
        # starting from the bottom
        num_rows, num_cols, _ = img.shape
        # NOTE(review): anchor_x is computed but never used.
        anchor_x = int(num_cols / 2)
        anchor_y = int(num_rows / 16)
        # compute the angle from the (pixel) differences
        # use eight anchor points, one per slice (the original comment said
        # "three" but eight displacements are used below)
        displacement = np.array([15, 13, 11, 9, 7, 5, 3, 1])
        angles = np.arctan(detected / (num_rows - anchor_y * displacement))
        # Circular mean: sum sines/cosines, ignoring NaN entries (slices
        # where no line was detected).
        sines = np.nansum(np.sin(angles))
        cosines = np.nansum(np.cos(angles))
        print(angles)
        return np.rad2deg(np.arctan2(sines, cosines))
    def create_buffer_handler(self):
        """Return the appsink "new-sample" callback: decode the JPEG buffer,
        compute a control input and send it (pickled) to the actor."""
        def handler(appsink):
            buf = appsink.emit("pull-sample").get_buffer()
            memory = buf.get_all_memory()
            success, info = memory.map(Gst.MapFlags.READ)
            try:
                if success:
                    array = np.frombuffer(info.data, dtype='uint8')
                    img = cv2.imdecode(array, 1)
                    mean_angle = self.angle_from_image(img)
                    print(mean_angle)
                    if np.isnan(mean_angle):
                        pass  # Do not control.
                    else:
                        u = self.control(mean_angle)
                        print("Computed inputs left/right ({})".format(",".join([str(v) for v in u])))
                        self.actor_socket.send(pickle.dumps(u))
            except Exception:
                # Keep the pipeline alive: log and swallow per-frame errors.
                logging.exception("")
            finally:
                memory.unmap(info)
            return Gst.FlowReturn.OK
        return handler
    def setup_pipeline(self):
        """Build the GStreamer graph: prrtsrc -> tee, with one branch into
        the appsink (control loop) and one into jpegdec -> ximagesink
        (live display)."""
        GObject.threads_init()
        self.pipeline = Gst.Pipeline.new("sensor")
        prrtsrc = Gst.ElementFactory.make("prrtsrc", "prrtsrc")
        prrtsrc.set_property("port", self.port)
        """
        gst-launch-1.0 prrtsrc port=$1 \
        ! jpegdec \
        ! videoconvert \
        ! ximagesink sync=false
        """
        tee = Gst.ElementFactory.make("tee", "tee")
        queue_app = Gst.ElementFactory.make("queue", "queue_app")
        queue_display = Gst.ElementFactory.make("queue", "queue_display")
        jpegdec = Gst.ElementFactory.make("jpegdec", "jpegdec")
        videoconvert = Gst.ElementFactory.make("videoconvert", "videoconvert")
        ximagesink = Gst.ElementFactory.make("ximagesink", "ximagesink")
        ximagesink.set_property("sync", False)
        appsink = Gst.ElementFactory.make("appsink", "appsink")
        # Keep only the newest frame for the control loop.
        appsink.set_property("drop", True)
        appsink.set_property("emit-signals", True)
        appsink.set_property("sync", True)
        appsink.set_property("max-buffers", 1)
        # Adding
        self.pipeline.add(prrtsrc)
        self.pipeline.add(tee)
        self.pipeline.add(queue_app)
        self.pipeline.add(queue_display)
        self.pipeline.add(jpegdec)
        self.pipeline.add(videoconvert)
        self.pipeline.add(ximagesink)
        self.pipeline.add(appsink)
        # Connecting
        appsink.connect("new-sample", self.create_buffer_handler())
        # Linking
        prrtsrc.link(tee)
        tee.link(queue_app)
        queue_app.link(appsink)
        tee.link(queue_display)
        queue_display.link(jpegdec)
        jpegdec.link(videoconvert)
        videoconvert.link(ximagesink)
    def run(self):
        """Assemble the pipeline, start playback and block on the GLib main
        loop until interrupted with Ctrl-C."""
        self.setup_pipeline()
        self.pipeline.set_state(Gst.State.PAUSED)
        self.pipeline.set_state(Gst.State.PLAYING)
        try:
            self.mainloop.run()
        except KeyboardInterrupt:
            pass
if __name__ == "__main__":
    # Run the controller with its default ports when invoked as a script.
    Controller().run()
import numpy as np
import cv2
class Image:
    """One horizontal slice of the camera frame plus the statistics of the
    dark line-contour extracted from it."""

    def __init__(self):
        self.image = None              # BGR slice, assigned by the caller
        self.contour_center_x = 0      # centroid x of the tracked contour
        self.main_contour = None       # largest contour in this slice
        self.contours = None           # every contour of the threshold mask
        self.middle_x = np.nan         # x coordinate of the slice center
        self.middle_y = np.nan         # y coordinate of the slice center
        self.prev_mc = None            # main contour of the previous frame
        self.prev_cx = None            # centroid x of the previous frame
        self.height = 0
        self.width = 0
        self.dir = 0                   # signed steering hint: offset * extent

    def process(self):
        """Threshold the slice, track the dominant dark contour, update the
        centroid/direction fields and draw a debug overlay on self.image."""
        # Convert to gray scale and binarize (dark pixels become white mask).
        imgray = cv2.cvtColor(self.image, cv2.COLOR_BGR2GRAY)
        ret, thresh = cv2.threshold(imgray, 100, 255, cv2.THRESH_BINARY_INV)
        # cv2.findContours returns (image, contours, hierarchy) in OpenCV 3.x
        # but (contours, hierarchy) in 4.x; [-2] selects the contour list in
        # both, so the original 3-tuple unpacking no longer breaks on 4.x.
        self.contours = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)[-2]
        self.prev_mc = self.main_contour
        if self.contours:
            self.main_contour = max(self.contours, key=cv2.contourArea)
            self.height, self.width = self.image.shape[:2]
            self.middle_x = int(self.width / 2)   # x coordinate of the middle point
            self.middle_y = int(self.height / 2)  # y coordinate of the middle point
            self.prev_cx = self.contour_center_x
            if self.get_contour_center(self.main_contour) != 0:
                self.contour_center_x = self.get_contour_center(self.main_contour)[0]
                # A large inter-frame jump is probably a mis-detected contour;
                # try to re-lock onto one near the previous center.
                if abs(self.prev_cx - self.contour_center_x) > 5:
                    self.correct_main_contour(self.prev_cx)
            else:
                self.contour_center_x = 0
            self.dir = int((self.middle_x - self.contour_center_x) * self.get_contour_extent(self.main_contour))
            # Debug overlay: contour (green), centroid (white), slice center
            # (red), plus the numeric offset and extent.
            cv2.drawContours(self.image, self.main_contour, -1, (0, 255, 0), 3)
            cv2.circle(self.image, (self.contour_center_x, self.middle_y), 7, (255, 255, 255), -1)
            cv2.circle(self.image, (self.middle_x, self.middle_y), 3, (0, 0, 255), -1)
            font = cv2.FONT_HERSHEY_SIMPLEX
            cv2.putText(self.image, str(self.middle_x - self.contour_center_x),
                        (self.contour_center_x + 20, self.middle_y), font, 1, (200, 0, 200), 2, cv2.LINE_AA)
            cv2.putText(self.image, "Weight:%.3f" % self.get_contour_extent(self.main_contour),
                        (self.contour_center_x + 20, self.middle_y + 35), font, 0.5, (200, 0, 200), 1, cv2.LINE_AA)

    @staticmethod
    def get_contour_center(contour):
        """Centroid [x, y] from the contour moments, or 0 for a zero-area
        contour (callers compare the result against 0 before indexing)."""
        M = cv2.moments(contour)
        if M["m00"] == 0:
            return 0
        return [int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"])]

    @staticmethod
    def get_contour_extent(contour):
        """Ratio of contour area to its bounding-rect area, in [0, 1].

        Returns 0.0 for a degenerate bounding rect; the original returned
        None here, which crashed the `dir` multiplication in process().
        """
        area = cv2.contourArea(contour)
        x, y, w, h = cv2.boundingRect(contour)
        rect_area = w * h
        if rect_area > 0:
            return float(area) / rect_area
        return 0.0

    @staticmethod
    def approx(a, b, error):
        """True when a and b differ by less than `error`."""
        return abs(a - b) < error

    def correct_main_contour(self, prev_cx):
        """After a large inter-frame jump, re-select the contour whose
        centroid lies within 5 px of the previous center, if any."""
        if abs(prev_cx - self.contour_center_x) > 5:
            for i in range(len(self.contours)):
                if self.get_contour_center(self.contours[i]) != 0:
                    tmp_cx = self.get_contour_center(self.contours[i])[0]
                    if self.approx(tmp_cx, prev_cx, 5):
                        self.main_contour = self.contours[i]
                        if self.get_contour_center(self.main_contour) != 0:
                            self.contour_center_x = self.get_contour_center(self.main_contour)[0]
import numpy as np
from .utils import *
def detect_lines(image, point_count):
    """Split `image` into `point_count` horizontal slices and return the
    horizontal line offset (in pixels) detected in each slice.

    The returned array is ordered bottom-up (index 0 is the bottom slice);
    entries are NaN where a slice had no contour.  Returns None when
    background removal yields no image.
    """
    internal_images_buffer = [Image() for _ in range(point_count)]
    img = remove_background(image, True)
    if img is None:
        return None
    slice_part(img, internal_images_buffer, point_count)
    output = np.zeros(point_count)
    for i, holder in enumerate(internal_images_buffer):
        if holder.main_contour is None:
            # np.NaN was removed in NumPy 2.0; use the lowercase spelling.
            output[point_count - 1 - i] = np.nan
        else:
            output[point_count - 1 - i] = holder.middle_x - holder.contour_center_x
    return output
import numpy as np
import cv2
from .image import *
def slice_part(im, images, slices):
    """Cut `im` into `slices` equal horizontal strips, assign one strip to
    each entry of `images` and run its process() method."""
    height, width = im.shape[:2]
    strip = int(height / slices)
    for idx in range(slices):
        top = strip * idx
        images[idx].image = im[top:top + strip, 0:width]
        images[idx].process()
def repack_images(images):
    """Vertically re-assemble the .image of every entry into one image.

    Replaces the original's convoluted index loop (which concatenated
    images[1] at i == 0, skipped i == 1, then appended the rest) with a
    single np.concatenate; also fixes the IndexError the original raised
    for a single-element list.
    """
    if len(images) == 1:
        return images[0].image
    return np.concatenate([holder.image for holder in images], axis=0)
def center(moments):
    """Return the centroid (x, y) of an image-moments dict, or 0 when the
    zeroth moment (area) is zero and no centroid exists."""
    m00 = moments["m00"]
    if m00 == 0:
        return 0
    cx = int(moments["m10"] / m00)
    cy = int(moments["m01"] / m00)
    return cx, cy
def remove_background(image, b):
    """Mask out everything brighter than `up` and invert, so the dark line
    remains prominent; returns `image` unchanged when b is False.

    NOTE(review): cv2.bitwise_not is applied in-place with a mask and the
    result is then globally inverted via (255 - image); the net effect on
    masked vs. unmasked pixels is subtle — verify against sample frames
    before refactoring.
    """
    up = 50
    # create NumPy arrays from the boundaries
    lower = np.array([0, 0, 0], dtype="uint8")
    upper = np.array([up, up, up], dtype="uint8")
    # ----------------COLOR SELECTION-------------- (Remove any area that is whiter than 'upper')
    if b:
        mask = cv2.inRange(image, lower, upper)
        image = cv2.bitwise_and(image, image, mask=mask)
        image = cv2.bitwise_not(image, image, mask=mask)
        image = (255 - image)
        return image
    else:
        return image
    # ////////////////COLOR SELECTION/////////////
from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor
import time
import atexit
# create a default object, no changes to I2C address or frequency
mh = Adafruit_MotorHAT(addr=0x60)
# recommended for auto-disabling motors on shutdown!
def turnOffMotors():
    """Auto-shutdown hook: put all four motor channels into RELEASE."""
    for motor_number in range(1, 5):
        mh.getMotor(motor_number).run(Adafruit_MotorHAT.RELEASE)
atexit.register(turnOffMotors)
def _speedToDirection(speed):
    """Translate the sign of `speed` into a Motor HAT direction constant."""
    if speed > 0:
        return Adafruit_MotorHAT.FORWARD
    if speed < 0:
        return Adafruit_MotorHAT.BACKWARD
    return Adafruit_MotorHAT.RELEASE
def setSpeed(left, right):
    """Drive the left side (motors 1, 2) and right side (motors 3, 4).

    Sign selects the direction, magnitude the PWM speed.  The right side
    gets a 10% boost to compensate drivetrain asymmetry; the boosted value
    is clamped to +/-255 because int(255 * 1.1) == 280 would exceed the
    0..255 range the Adafruit driver's setSpeed accepts.
    """
    right = int(right * 1.1)
    right = max(-255, min(255, right))  # keep abs(right) within the driver limit
    # Set directions first, then speeds, as in the original ordering.
    for channel, speed in ((1, left), (2, left), (3, right), (4, right)):
        mh.getMotor(channel).run(_speedToDirection(speed))
    for channel, speed in ((1, left), (2, left), (3, right), (4, right)):
        mh.getMotor(channel).setSpeed(abs(speed))
if __name__ == "__main__":
    # Manual smoke test: arc right, stop, arc left — repeat until killed.
    FAST = 255  # speed of the fast side
    DIV = 4     # divisor for the slow side
    phases = (
        ("right", FAST // DIV, FAST),
        ("stop", 0, 0),
        ("left", -FAST // DIV, -FAST),
    )
    while True:
        for label, left_speed, right_speed in phases:
            print(label)
            setSpeed(left_speed, right_speed)
            time.sleep(1)
import subprocess
import prrt
import signal
import pickle
import motorControl
class Plant:
    """Car-side node: streams camera video to the controller through a
    raspivid | gst-launch shell pipeline and applies incoming wheel-speed
    commands to the motors."""
    def __init__(self, actor_port=8000, controller=("10.8.0.16", 5000), target_delay=50000):
        # controller: (host, port) the video stream is pushed to.
        self.controller = controller
        # Handed to prrtsink as `targetdelay`; units differ from the
        # controller-side PrrtSocket value (0.05) — presumably
        # microseconds vs. seconds.  TODO confirm.
        self.target_delay = target_delay
        # Local PRRT port on which wheel-speed commands arrive.
        self.actor_port = actor_port
    def run(self):
        """Start the video pipeline, then loop applying received commands
        until interrupted; always shuts the pipeline down on exit."""
        pipeline = 'raspivid -t 0 -cd MJPEG -w 800 -h 450 -fps 24 -b 8000000 -o - | gst-launch-1.0 fdsrc \
        ! "image/jpeg,framerate=24/1" \
        ! jpegparse \
        ! queue \
        ! prrtsink host={} port={} targetdelay={}'.format(self.controller[0], self.controller[1], self.target_delay)
        # NOTE(review): shell=True with a formatted command string; the
        # interpolated values come from __init__ defaults rather than
        # untrusted input, but a list-based Popen would be safer.
        p = subprocess.Popen(pipeline, shell=True)
        try:
            s = prrt.PrrtSocket(self.actor_port, False)
            while True:
                data = s.receive_ordered_wait(0.01)
                if len(data) == 0:
                    continue
                else:
                    # SECURITY NOTE(review): pickle.loads on data received
                    # from the network executes arbitrary code if the peer
                    # is untrusted.
                    u = pickle.loads(data)
                    print(u)
                    motorControl.setSpeed(int(u[0]), int(u[1]))
        except KeyboardInterrupt:
            pass
        finally:
            # SIGINT (not kill) so the gst pipeline can flush and exit.
            p.send_signal(signal.SIGINT)
            p.wait()
if __name__ == "__main__":
    # Start the plant with the default controller address and ports.
    Plant().run()
import socket
import sys
import os
import numpy as np
import pdb
import cv2
import time
from datetime import datetime
from Image import *
from Utils import *
from LineDetect import *
# Debug server: accept one TCP connection at a time, read a whole JPEG
# frame, run the line detection and show the annotated result.
font = cv2.FONT_HERSHEY_SIMPLEX
direction = 0
N_SLICES = 8
# One Image holder per horizontal slice of the frame.
Images = [Image() for _ in range(N_SLICES)]
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Bind the socket to the port
server_address = ('0.0.0.0', 8001)
sock.bind(server_address)
# Listen for incoming connections
sock.listen(1)
while True:
    connection, client_address = sock.accept()
    try:
        # Collect chunks in a list and join once: O(n) instead of the
        # quadratic bytes-concatenation of the original.
        chunks = []
        first = True
        start = None
        while True:
            packet = connection.recv(15000)
            if first:
                start = datetime.now()
                print("Start.")
                first = False
            if not packet:
                break
            chunks.append(packet)
        data = b''.join(chunks)
        diff = (datetime.now() - start).total_seconds()
        print("Received {} bytes in {}s".format(len(data), diff))
        if data:
            array = np.frombuffer(data, dtype='uint8')
            img = cv2.imdecode(array, 1)
            detected = detectLines(img, 8)
            print("[ " + " ".join([str(x) for x in detected]) + " ]")
            cv2.imshow("Camera capture", img)
            direction = 0
            img = remove_background(img, True)
            if img is not None:
                # time.clock() was removed in Python 3.8; perf_counter()
                # is the portable monotonic replacement for timing.
                t1 = time.perf_counter()
                slice_part(img, Images, N_SLICES)
                fm = repack_images(Images)
                t2 = time.perf_counter()
                cv2.putText(fm, "Time: " + str((t2 - t1) * 1000) + " ms", (10, 470), font, 0.5, (0, 0, 255), 1, cv2.LINE_AA)
                cv2.imshow("Vision Race", fm)
        if cv2.waitKey(100000) & 0xFF == ord('q'):
            break
    finally:
        # Clean up the connection
        cv2.destroyAllWindows()
import prrt
import numpy as np
import cv2
import datetime