diff --git a/Robot_Development/catkin_ws/src/src/lane_demo/src/lane_demo.cpp b/Robot_Development/catkin_ws/src/src/lane_demo/src/lane_demo.cpp
index 5d8a1f19d4cf606ea75e336042279695a37715f7..b64cf34d6acdc1a1e8e9d52ea64efa1d51bffa57 100644
--- a/Robot_Development/catkin_ws/src/src/lane_demo/src/lane_demo.cpp
+++ b/Robot_Development/catkin_ws/src/src/lane_demo/src/lane_demo.cpp
@@ -66,7 +66,7 @@ void SenseAndAvoid::chatterCallback(const std_msgs::String::ConstPtr& msg)
     ROS_INFO("TURN_RIGHT");
     state=TURN_RIGHT;
     vel_msg.linear.x = 0;
-    vel_msg.angular.z = -1.8;
+    vel_msg.angular.z = 1.25;
     vel_pub.publish(vel_msg);
   }
   else if(msg->data.compare("Drift Right") == 0)
@@ -74,7 +74,7 @@ void SenseAndAvoid::chatterCallback(const std_msgs::String::ConstPtr& msg)
     ROS_INFO("TURN_LEFT");
     state=TURN_LEFT;
     vel_msg.linear.x = 0;
-    vel_msg.angular.z = 1.8;
+    vel_msg.angular.z = -1.25;
     vel_pub.publish(vel_msg);
   }
   else
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/launch/lane_detection.launch b/Robot_Development/catkin_ws/src/src/lane_detection/launch/lane_detection.launch
index dd714004f994c772053a5c38662a25eeac2e8659..804a4d2cbfe86e8ca0ced8e78ab9089fc3c7b2fc 100644
--- a/Robot_Development/catkin_ws/src/src/lane_detection/launch/lane_detection.launch
+++ b/Robot_Development/catkin_ws/src/src/lane_detection/launch/lane_detection.launch
@@ -1,4 +1,4 @@
 <?xml version="1.0"?>
 <launch>
-  <node name="lane_detection" pkg="lane_detection" type="detect.py" output="screen"/>
+  <node name="lane_detection" pkg="lane_detection" type="detect_test.py" output="screen"/>
 </launch>
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect.py.swp b/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect.py.swp
new file mode 100644
index 0000000000000000000000000000000000000000..68ec83c31e134b90da136ba9b0d0512a6cdaa876
Binary files /dev/null and b/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect.py.swp differ
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect_test.py.swp b/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect_test.py.swp
new file mode 100644
index 0000000000000000000000000000000000000000..228ef011d303d64fe02942ad0c1157aa02927acb
Binary files /dev/null and b/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect_test.py.swp differ
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/detect.py b/Robot_Development/catkin_ws/src/src/lane_detection/src/detect.py
index dbfdb67a79e818b8dde379882b33d83a43d4d90c..ff8af798e709465c627d890e530b635caf5c5779 100755
--- a/Robot_Development/catkin_ws/src/src/lane_detection/src/detect.py
+++ b/Robot_Development/catkin_ws/src/src/lane_detection/src/detect.py
@@ -42,7 +42,7 @@ def detect_edge(pic):
 # pic: original image to apply the pre-set region of interest too
 def ROI(pic):
     height = pic.shape[0]
-    triangle = np.array([[(100, height), (600, height), (350, 100)]])
+    triangle = np.array([[(0, height), (650, height), (325, 100)]])
     mask = np.zeros_like(pic)
     cv2.fillPoly(mask, triangle, 255)
     roi = cv2.bitwise_and(pic, mask)
@@ -54,7 +54,7 @@ def ROI(pic):
 # pic: original image to apply the pre-set region of interest too
 def ROI_real(pic):
     height = pic.shape[0]
-    triangle = np.array([[(0, height), (620, height), (430, 250), (150, 250)]])
+    triangle = np.array([[(0, height), (650, height), (0, 0), (650, 0)]])
     mask = np.zeros_like(pic)
     cv2.fillPoly(mask, triangle, 255)
     roi = cv2.bitwise_and(pic, mask)
@@ -81,7 +81,6 @@ def applyLines(original_pic, lines):
     for line in lines:
         # parse the array to individual variables
         x1, y1, x2, y2 = line.reshape(4)
-        # print(line)
 
         # Draw the lines on the original picture
         cv2.line(original_pic, (x1, y1), (x2, y2), (255, 0, 0), 3)
@@ -98,7 +97,7 @@ def find_poly_lane(pic, lines):
     # Collections for the negative and positive sloped lines
     left_lines_points = [[], []] # Negative slope
    right_lines_points = [[], []] # Positive slope
-    # print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Start~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+
     for line in lines:
         x1, y1, x2, y2 = line[0]
         parameters = np.polyfit((x1, x2), (y1, y2), 1)
@@ -125,14 +124,9 @@ def find_poly_lane(pic, lines):
             right_lines_points[1].append(y1)
             right_lines_points[1].append(y2)
 
-    # print("POINTS")
-    # print(right_lines_points[0])
-    # print(left_lines_points[0])
-
     if right_lines_points[0]:
         z = np.polyfit(right_lines_points[0], right_lines_points[1], 1)
         f = np.poly1d(z)
-        # print(f)
 
         right_x_new = np.linspace(np.amin(right_lines_points[0]), np.amax(right_lines_points[0]), 150)
         right_y_new = f(right_x_new)
@@ -144,7 +138,6 @@ def find_poly_lane(pic, lines):
     if left_lines_points[0]:
         z = np.polyfit(left_lines_points[0], left_lines_points[1], 1)
         f = np.poly1d(z)
-        # print(f)
 
         left_x_new = np.linspace(np.amin(left_lines_points[0]), np.amax(left_lines_points[0]), 150)
         left_y_new = f(left_x_new)
@@ -153,10 +146,6 @@ def find_poly_lane(pic, lines):
         left_x_new = []
         left_y_new = []
 
-    # print("New left")
-    # print(left_x_new)
-    # print(left_y_new)
-
     return [right_x_new, right_y_new], [left_x_new, left_y_new]
 
 
@@ -175,8 +164,6 @@ def find_average_lane(pic, lines):
     for line in lines:
         x1, y1, x2, y2 = line[0]
         parameters = np.polyfit((x1, x2), (y1, y2), 1)
-        print("Slope, intercept")
-        print(parameters)
         slope = parameters[0]
         intercept = parameters[1]
 
@@ -203,8 +190,6 @@ def find_average_lane(pic, lines):
     else:
         left_average = np.average(left_lines, axis=0)
         left_line = make_coordinates(pic, left_average)
-        print("Left Line: ")
-        print(left_line)
 
     if not right_lines:
         print("Right is emtpy")
@@ -213,19 +198,11 @@ def find_average_lane(pic, lines):
     else:
         right_average = np.average(right_lines, axis=0)
         right_line = make_coordinates(pic, right_average)
-        print("Right line : ")
-        print(right_line)
-
-    print("Left fit")
-    print(left_line)
-    print("\nRight fit")
-    print(right_line)
 
     return np.array([left_line, right_line])
 
 
 def make_coordinates(image, line_parameters):
-    print(line_parameters)
     slope, intercept = line_parameters
     y1 = image.shape[0]
     y2 = int(y1 * (1 / 2))
@@ -275,6 +252,7 @@ def detectDeparture(left, car, right):
 
 video = cv2.VideoCapture(1)
 plt.ion()
 
+
 def lane_status(direction):
     rospy.loginfo(direction)
@@ -284,7 +262,6 @@ def lane_status(direction):
 
 while not rospy.is_shutdown():
     ret, frame = video.read()
-    print(type(frame))
 
     frame_edge = detect_edge(frame)
 
@@ -292,12 +269,12 @@ while not rospy.is_shutdown():
 
     wEdges = detect_edge(new_img)
 
-    cropped = ROI_real(wEdges)
+    cropped = ROI(wEdges)
 
     lines = getLines(cropped)
 
     if lines is None:
-        x = 0
+        x = 0
 
     else:
         Rpoints, Lpoints = find_poly_lane(new_img, lines)
@@ -319,13 +296,13 @@ while not rospy.is_shutdown():
     lane = applyLines(frame, lines)
 
     # Display edge detection
-    #cv2.imshow("edges", wEdges)
+    cv2.imshow("edges", wEdges)
 
     # Display cropped edge detection
-    #cv2.imshow("cropped", cropped)
+    cv2.imshow("cropped", cropped)
 
     # Show original image with all lines detected
-    #cv2.imshow("frame", lane)
+    cv2.imshow("frame", lane)
 
     key = cv2.waitKey(25)
     if key == 27:
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/detect_test.py b/Robot_Development/catkin_ws/src/src/lane_detection/src/detect_test.py
new file mode 100755
index 0000000000000000000000000000000000000000..18052d3f9d9b5f0228665fcae0eb480571fddb7b
--- /dev/null
+++ b/Robot_Development/catkin_ws/src/src/lane_detection/src/detect_test.py
@@ -0,0 +1,322 @@
+#!/usr/bin/python2
+
+import numpy as np
+import rospy
+import matplotlib.pyplot as plt
+import cv2
+
+from imutils.video import WebcamVideoStream
+from imutils.video import FPS
+from std_msgs.msg import String
+from std_msgs.msg import Float64
+
+pub = rospy.Publisher('lane_status', String, queue_size=10)
+rospy.init_node('lane_status')
+rate = rospy.Rate(100) # 100hz
+width = 325
+
+# Converts picture to grayscale and applies filter to picture
+# param`s
+# pic : a numpy array of pixel values to represent a picture
+def formatImg(pic):
+    # Convert picture to gray scale
+    gray = cv2.cvtColor(pic, cv2.COLOR_BGR2GRAY)
+
+    # Apply filter to image
+    img_filter = cv2.GaussianBlur(gray, (5, 5), 0)
+
+    return img_filter
+
+
+# pre-processes and performs edge detection on an image
+# params
+# pic: a numpy array of pixel values for an image in gray scale
+def detect_edge(pic):
+    # Perform canny edge detection
+    img_edge = cv2.Canny(pic, 50, 150)
+
+    # return new edge image
+    return img_edge
+
+
+# Define the region of in which the lanes will be in the cameras view
+# params
+# pic: original image to apply the pre-set region of interest too
+def ROI(pic):
+    height = pic.shape[0]
+    triangle = np.array([[(0, height), (650, height), (325, 100)]])
+    mask = np.zeros_like(pic)
+    cv2.fillPoly(mask, triangle, 255)
+    roi = cv2.bitwise_and(pic, mask)
+    return roi
+
+
+# Define the region of in which the lanes will be in the cameras view
+# params
+# pic: original image to apply the pre-set region of interest too
+def ROI_real(pic):
+    height = pic.shape[0]
+    triangle = np.array([[(0, height), (600, height), (475, 300), (175, 300)]])
+    mask = np.zeros_like(pic)
+    cv2.fillPoly(mask, triangle, 255)
+    roi = cv2.bitwise_and(pic, mask)
+    return roi
+
+
+# Get all possible HoughLines and return them
+# params
+# edge_pic: the image with the edge detection performed
+def getLines(edge_pic):
+    return cv2.HoughLinesP(edge_pic, 1, np.pi / 180, 100, maxLineGap=80, minLineLength=20)
+
+
+# Apply the passed in lines the the original picture
+# params
+# original_pic:
+# lines: Array of lines in the form (x1, x2, y1, y2)
+def applyLines(original_pic, lines):
+    # If there is no lines return the original photo
+    if lines is None:
+        return original_pic
+
+
+    # Loop through all possible lines
+    for line in lines:
+
+        # parse the array to individual variables
+        x1, y1, x2, y2 = line.reshape(4)
+
+        # Draw the lines on the original picture
+        cv2.line(original_pic, (x1, y1), (x2, y2), (255, 0, 0), 3)
+
+    # return the original picture with the lines drawn on
+    return original_pic
+
+
+# Find the two average lines given the set of lines
+# params
+# pic: the original image
+# lines: An array of lines in the form (x1, x2, y1, y2)
+def find_poly_lane(pic, lines):
+    # Collections for the negative and positive sloped lines
+    left_lines_points = [[], []] # Negative slope
+    right_lines_points = [[], []] # Positive slope
+
+    for line in lines:
+        x1, y1, x2, y2 = line[0]
+        parameters = np.polyfit((x1, x2), (y1, y2), 1)
+        # print("Slope, intercept")
+        # print(parameters)
+        slope = parameters[0]
+        intercept = parameters[1]
+
+        if (slope < 1 and slope > -1):
+            #print("Line ignored");
+            x = 1
+
+        elif (slope < 0):
+            #print("left insert")
+            left_lines_points[0].append(x1)
+            left_lines_points[0].append(x2)
+            left_lines_points[1].append(y1)
+            left_lines_points[1].append(y2)
+
+        else:
+            #print("right insert")
+            right_lines_points[0].append(x1)
+            right_lines_points[0].append(x2)
+            right_lines_points[1].append(y1)
+            right_lines_points[1].append(y2)
+
+    if right_lines_points[0]:
+        z = np.polyfit(right_lines_points[0], right_lines_points[1], 1)
+        f = np.poly1d(z)
+
+        right_x_new = np.linspace(np.amin(right_lines_points[0]), np.amax(right_lines_points[0]), 150)
+        right_y_new = f(right_x_new)
+
+    else:
+        right_x_new = []
+        right_y_new = []
+
+    if left_lines_points[0]:
+        z = np.polyfit(left_lines_points[0], left_lines_points[1], 1)
+        f = np.poly1d(z)
+
+        left_x_new = np.linspace(np.amin(left_lines_points[0]), np.amax(left_lines_points[0]), 150)
+        left_y_new = f(left_x_new)
+
+    else:
+        left_x_new = []
+        left_y_new = []
+
+    return [right_x_new, right_y_new], [left_x_new, left_y_new]
+
+
+# Find the two average lines given the set of lines
+# params
+# pic: the original image
+# lines: An array of lines in the form (x1, x2, y1, y2)
+# Deprecated???
+def find_average_lane(pic, lines):
+    # Collections for the negative and positive sloped lines
+    left_lines = [] # Negative slope
+    left_lines_points = [[], []] # Negative slope
+    right_lines = [] # Positive slope
+    right_lines_points = [[], []] # Positive slope
+
+    for line in lines:
+        x1, y1, x2, y2 = line[0]
+        parameters = np.polyfit((x1, x2), (y1, y2), 1)
+        slope = parameters[0]
+        intercept = parameters[1]
+
+        if (slope < 0):
+            print("Left insert")
+            left_lines.append((slope, intercept))
+            left_lines_points[0].append(x1)
+            left_lines_points[0].append(x2)
+            left_lines_points[1].append(y1)
+            left_lines_points[1].append(y2)
+
+        else:
+            print("Right insert")
+            right_lines.append((slope, intercept))
+            right_lines_points[0].append(x1)
+            right_lines_points[0].append(x2)
+            right_lines_points[1].append(y1)
+            right_lines_points[1].append(y2)
+
+    if not left_lines:
+        print("Left is empty")
+        left_line = [0, 0, 0, 0]
+
+    else:
+        left_average = np.average(left_lines, axis=0)
+        left_line = make_coordinates(pic, left_average)
+
+    if not right_lines:
+        print("Right is emtpy")
+        right_line = [0, 0, 0, 0]
+
+    else:
+        right_average = np.average(right_lines, axis=0)
+        right_line = make_coordinates(pic, right_average)
+
+    return np.array([left_line, right_line])
+
+
+def make_coordinates(image, line_parameters):
+    slope, intercept = line_parameters
+    y1 = image.shape[0]
+    y2 = int(y1 * (1 / 2))
+    x1 = int((y1 - intercept) / slope)
+    x2 = int((y2 - intercept) / slope)
+    return np.array([x1, y1, x2, y2])
+
+
+
+
+
+def detectDeparture(left, car, right):
+    a = 9999999
+    b = 999999
+
+    try:
+        parametersLeft = np.polyfit((left[0][0], left[0][-1]), (left[1][0], left[1][-1]), 1)
+        leftEq = np.poly1d(parametersLeft)
+        leftPosition = (310 - parametersLeft[1]) / parametersLeft[0]
+        a = car - leftPosition
+        left_lane = 1
+
+    except IndexError:
+        print("Left lane does not exist")
+        left_lane = 0
+    try:
+        parametersRight = np.polyfit((right[0][0], right[0][-1]), (right[1][0], right[1][-1]), 1)
+        rightEq = np.poly1d(parametersRight)
+        rightPosition = (310 - parametersRight[1]) / parametersRight[0]
+        b = rightPosition - car
+        right_lane = 1
+
+    except IndexError:
+        print("Right lane does not exist")
+        right_lane = 0
+    area = width / 4
+
+    if left_lane == 0 and right_lane == 0:
+        direction = "No lane detected"
+        print("No lanes detected")
+    elif (area > a):
+        direction = "Drift Left"
+        print("Drift left")
+    elif (area > b):
+        direction = "Drift Right"
+        print("Drift right")
+    else:
+        direction = "On Course"
+        print("On course")
+
+    return direction
+
+def lane_status(direction):
+    rospy.loginfo(direction)
+    pub.publish(direction)
+    rate.sleep()
+
+
+#video = cv2.VideoCapture(1)
+plt.ion()
+
+video = WebcamVideoStream(src=1).start()
+#fps = FPS.start()
+
+
+while not rospy.is_shutdown():
+    frame = video.read()
+
+    frame_edge = detect_edge(frame)
+
+    new_img = formatImg(frame)
+
+    wEdges = detect_edge(new_img)
+
+    cropped = ROI(wEdges)
+
+    lines = getLines(cropped)
+
+    if lines is None:
+        x = 0
+
+    else:
+        Rpoints, Lpoints = find_poly_lane(new_img, lines)
+
+        #plt.cla()
+        #plt.clf()
+
+        #plt.scatter(Rpoints[0], Rpoints[1])
+        #plt.scatter(Lpoints[0], Lpoints[1])
+
+        #plt.scatter(310, 350)
+
+        # plt.imshow(frame, zorder=0)
+
+        direction = detectDeparture(Lpoints, 350, Rpoints)
+        lane_status(direction)
+        # plt.pause(.001)
+
+    lane = applyLines(frame, lines)
+
+    # Display edge detection
+    #cv2.imshow("edges", wEdges)
+
+    # Display cropped edge detection
+    cv2.imshow("cropped", cropped)
+
+    # Show original image with all lines detected
+    cv2.imshow("frame", lane)
+
+    key = cv2.waitKey(25)
+    if key == 27:
+        break
+video.release()
+cv2.destroyAllWindows()
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/roi.py b/Robot_Development/catkin_ws/src/src/lane_detection/src/roi.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea7102918a75e58c4b587b10446877d6a6d07073
--- /dev/null
+++ b/Robot_Development/catkin_ws/src/src/lane_detection/src/roi.py
@@ -0,0 +1,19 @@
+import matplotlib.pyplot as plt
+import cv2
+
+video = cv2.VideoCapture(1)
+
+plt.ion
+
+while True:
+    ret, frame = video.read()
+
+    key = cv2.waitKey(25)
+    if key == 27:
+        break
+
+    plt.imshow(frame, zorder=0)
+    plt.pause(.001)
+
+video.release()
+cv2.destroyAllWindows()
diff --git a/Robot_Development/catkin_ws/src/src/rosjet/jet_bringup/launch/jet_real.launch b/Robot_Development/catkin_ws/src/src/rosjet/jet_bringup/launch/jet_real.launch
index be4c37f2822cea2e781eb0c2ddc23f2a56dff146..9606d241fbed3a330b86b4d0a1a523d0055916d0 100644
--- a/Robot_Development/catkin_ws/src/src/rosjet/jet_bringup/launch/jet_real.launch
+++ b/Robot_Development/catkin_ws/src/src/rosjet/jet_bringup/launch/jet_real.launch
@@ -4,7 +4,7 @@
   <include file="$(find jet_serial)/launch/jet_serial.launch" />
 
   <node pkg="joy" type="joy_node" name="joy_node">
-    <param name="dev" value="/dev/input/js0" />
+    <param name="dev" value="/dev/input/js4" />
     <param name="deadzone" value="0.3" />
     <param name="autorepeat_rate" value="20" />
   </node>