diff --git a/Robot_Development/catkin_ws/src/lane_demo/src/Detection.h b/Robot_Development/catkin_ws/src/lane_demo/src/Detection.h
index 4e87ad977a428a0961d7ea204dd915372c4cae25..d49d1ce369bce07cdf74e0b9ae02b9aaffb46667 100644
--- a/Robot_Development/catkin_ws/src/lane_demo/src/Detection.h
+++ b/Robot_Development/catkin_ws/src/lane_demo/src/Detection.h
@@ -28,9 +28,9 @@ public:
     }
     Detection(string serialization) {
         string whitespace = removeSpaces(serialization);
-        cout << whitespace << endl;
+        //cout << whitespace << endl;
         string unbraced = whitespace.substr(1, whitespace.length()-2);
-        cout << unbraced << endl;
+        //cout << unbraced << endl;
         string arr[5];
         
         string str1 = unbraced;
@@ -58,7 +58,7 @@ public:
         str3 = "";
         str1.replace(str1.find(str2),str2.length(),str3);
         
-        cout << str1 << endl;
+        //cout << str1 << endl;
         
         string name = str1.substr(0,str1.find(','));
         str1 = str1.substr(str1.find(',')+1, str1.length()-1);
diff --git a/Robot_Development/catkin_ws/src/lane_demo/src/lane_demo.cpp b/Robot_Development/catkin_ws/src/lane_demo/src/lane_demo.cpp
index 49a8d17568dc81ecc7d0f17c9a30cf925d6da9f4..bcca1db91180dc2733204ae43906e9ffd552dec7 100644
--- a/Robot_Development/catkin_ws/src/lane_demo/src/lane_demo.cpp
+++ b/Robot_Development/catkin_ws/src/lane_demo/src/lane_demo.cpp
@@ -11,8 +11,7 @@
 #include <math.h>
 #include <cstddef>
 #include <string.h>
-#define LINEAR_SPEED 0.6
-#define ANGULAR_SPEED 1.9
+#define ANGULAR_SPEED 0.25
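+// rad/s for lane-correction turns; the forward speed is now the runtime-adjustable
+// SenseAndAvoid::linear_speed member rather than a compile-time constant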
 #define OBJECT_DIST_NEAR 40
 
 enum STATE { FORWARD, TURN_RIGHT, TURN_LEFT, STOP};
@@ -46,7 +45,7 @@ class SenseAndAvoid
 	
     geometry_msgs::Twist vel_msg;
 
-    int left_count, right_count, left_sonar, right_sonar, object_min;
+    int left_count, right_count, left_sonar, right_sonar, object_min, cur_count;
     float lane_distance, Kp, Kv, Kd, integral_error, cur_error, derivative_error, prev_error;
 
     // TODO : Keep a vector of current detections ( last ~1 second, no duplicates ) 
@@ -58,6 +57,7 @@ class SenseAndAvoid
     double elapsed = difftime(now, last_detection_time);
     
     STATE state;
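+	// Forward speed in m/s; adjusted when speed-limit signs are detected.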
+	float linear_speed = 0.5;
 };
 SenseAndAvoid::SenseAndAvoid()
 {
@@ -74,10 +74,10 @@ SenseAndAvoid::SenseAndAvoid()
   left_count = 0;
   right_count = 0;
   cur_error = 0;
-  Kp = .01;
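+  // Gains retuned for the pixel-offset error from lane_distance; Kv = 0 keeps the integral term disabled.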
+  Kp = 0.003;
   last_detection_time = time(NULL);
   Kv = 0;
-  Kd = 0.065;
+  Kd = 0.002;
 }
 
 void SenseAndAvoid::Lanes(const std_msgs::String::ConstPtr& msg)
@@ -169,52 +169,72 @@ Detection Variables
 		ROS_INFO("OBJECT IN THE WAY, STOP");
 		vel_msg.linear.x = 0;
 		vel_msg.angular.z = 0;
-		vel_pub.publish(vel_msg);
   }
-
-  else if(thanos._name == "stopsign" && thanos._prob > 30)
+  else if(thanos._name == "stop" && thanos._prob > 80)
   {
 	  	ROS_INFO("STOP SIGN FOUND");
 		vel_msg.linear.x = 0;
+		vel_msg.angular.z = 0;
+  }
+  else if(thanos._name == "person" && thanos._prob > 60)
+  {
+		ROS_INFO("PERSON FOUND");
+		vel_msg.linear.x = 0;
     	vel_msg.angular.z = 0;
-    	vel_pub.publish(vel_msg);
   }
-
+  else if(thanos._name == "speedLimit25" && thanos._prob > 60)
+  {
+		ROS_INFO("SPEED LIMIT 25");
+		linear_speed = 0.3;
+		vel_msg.linear.x = linear_speed;
+		vel_msg.angular.z = 0;
+  }
+  else if(thanos._name == "speedLimit35" && thanos._prob > 60)
+  {
+		ROS_INFO("SPEED LIMIT 35");
+		linear_speed = 0.5;
+		vel_msg.linear.x = linear_speed;
+		vel_msg.angular.z = 0;
+  }
   else if(state == STOP)
   {
 	ROS_INFO("NO LANES, STOP");
 	vel_msg.linear.x = 0;
 	vel_msg.angular.z = 0;
-	vel_pub.publish(vel_msg);
   }
-
   else if(state == TURN_LEFT)
   {
 	ROS_INFO("TURNING LEFT");
-	vel_msg.linear.x = 0.1;
+	vel_msg.linear.x = linear_speed;
 	vel_msg.angular.z = -ANGULAR_SPEED;
-	vel_pub.publish(vel_msg);
   }
   else if(state == TURN_RIGHT)
   {
 	ROS_INFO("TURNING_RIGHT");
-	vel_msg.linear.x = 0.1;
+	vel_msg.linear.x = linear_speed;
 	vel_msg.angular.z = ANGULAR_SPEED;
-	vel_pub.publish(vel_msg);
   }
-
   else
   {
-	ROS_INFO("PID Control");
 	cur_error = lane_distance;
 	ROS_INFO_STREAM(cur_error);
-	integral_error += cur_error;
-	derivative_error = cur_error - prev_error;
-	vel_msg.angular.z = -((Kp * cur_error) + (Kv * integral_error) + (Kd * derivative_error));
-	vel_msg.linear.x = LINEAR_SPEED;
-	vel_pub.publish(vel_msg);
-	prev_error = cur_error;
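+	// Steer only when the lane-center offset leaves a +/-20 px deadband;
+	// smaller offsets are treated as centered and the robot drives straight.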
+	if(cur_error > 20 || cur_error < (-20))
+	{
+		ROS_INFO("PID Control");
+		integral_error += cur_error;
+		derivative_error = cur_error - prev_error;
+		vel_msg.linear.x = linear_speed;
+		vel_msg.angular.z = -(Kp * cur_error + Kv * integral_error + Kd * derivative_error);
+		prev_error = cur_error;
+	}
+	else
+	{
+		ROS_INFO("GOING STRAIGHT");
+		vel_msg.linear.x = linear_speed;
+		vel_msg.angular.z = 0;
+	}
   }
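+  // Publish once at the end of the callback; the branches above only fill in vel_msg.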
+  vel_pub.publish(vel_msg);
 }
 
 
diff --git a/Robot_Development/catkin_ws/src/lane_detection/src/detect.py b/Robot_Development/catkin_ws/src/lane_detection/src/detect.py
index bd037de24669b734f6e8a5607446df11234e1f50..fb0ba974ae24b23614495ef9757799c8991af782 100755
--- a/Robot_Development/catkin_ws/src/lane_detection/src/detect.py
+++ b/Robot_Development/catkin_ws/src/lane_detection/src/detect.py
@@ -12,7 +12,7 @@ from std_msgs.msg import Float64
 pub = rospy.Publisher('lanes', String, queue_size=10)
 pub1 = rospy.Publisher('lane_distance', Float64, queue_size=10)
 rospy.init_node('lane_detection')
-rate = rospy.Rate(500)  # 1000hz
+rate = rospy.Rate(1000)  # 1000hz
 width = 325
 
 
@@ -25,9 +25,14 @@ def formatImg(pic):
 
     # Apply filter to image
     img_filter = cv2.GaussianBlur(gray, (5, 5), 0)
-
     return img_filter
-
+
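+# Thresholds, blurs, and edge-detects a frame in one pass; used below in place
+# of formatImg + detect_edge so low-contrast edges are discarded early
+# params
+# pic: a BGR image as a numpy array of pixel values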
+def thresh(pic):
+    grayscaled = cv2.cvtColor(pic, cv2.COLOR_BGR2GRAY) #grayscale
+    retval, th = cv2.threshold(grayscaled, 80, 255, cv2.THRESH_BINARY) #filter out unnecessary edges
+    img_filter = cv2.GaussianBlur(th, (5, 5), 0) #Make the image more cohesive
+    img_edge = cv2.Canny(img_filter, 50, 150) #find the edges
+    return img_edge
 
 # pre-processes and performs edge detection on an image
 # params
@@ -44,7 +49,8 @@ def detect_edge(pic):
 # pic: original image to apply the pre-set region of interest too
 def ROI_real(pic):
     height = pic.shape[0]
-    triangle = np.array([[(0, height), (75, 200), (530, 200), (630, height)]], dtype=np.int32)
+    width = pic.shape[1]
+    triangle = np.array([[(0, height), (135, 250), (515, 250), (width, height)]], dtype=np.int32) #shape of a trapezoid
     mask = np.zeros_like(pic)
     cv2.fillPoly(mask, triangle, 255)
     roi = cv2.bitwise_and(pic, mask)
@@ -211,10 +217,10 @@ def detectDeparture(left, car, right):
 
 
 def detectDepartureNew(midPoints):
-    midx = 275
-    midy = 400
+    midx = 300
+    midy = 350
     for x in range(150):
-        if (myround(midPoints[1][x]) == 400):
+        if (myround(midPoints[1][x]) == midy):
             # print(midPoints[0][x] - midx)
             return midPoints[0][x] - midx
 
@@ -246,54 +252,63 @@ while not rospy.is_shutdown():
     frame = video.read()
     # gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
     # frame = cv2.imread("../frame.jpeg")
-    frame_edge = detect_edge(frame)
-
-    new_img = formatImg(frame)
+    #frame_edge = detect_edge(frame)
 
-    wEdges = detect_edge(new_img)
+    test = thresh(frame)
+    #new_img = formatImg(frame)
 
-    cropped = ROI_real(wEdges)
-    # cropped = ROI(wEdges)
+    #wEdges = detect_edge(new_img)
 
-    lines = getLines(cropped)
+    #cropped = ROI_real(wEdges)
+    cropped_2 = ROI_real(test)
 
-    if lines is None:
+    #lines = getLines(cropped)
+    lines_2 = getLines(cropped_2)
+    if lines_2 is None:
         x = 0
 
     else:
-        Rpoints, Lpoints = find_poly_lane(new_img, lines)
-
-        Mpoints = find_middle(Lpoints, Rpoints)
+        #Rpoints, Lpoints = find_poly_lane(new_img, lines)
+        Rpoints_2, Lpoints_2 = find_poly_lane(test, lines_2)
+        #Mpoints = find_middle(Lpoints, Rpoints)
+        Mpoints_2 = find_middle(Lpoints_2,Rpoints_2)
 
         # (type(Rpoints[0][0]))
-        #plt.cla()
-        #plt.clf()
+        plt.cla()
+        plt.clf()
 
         #plt.scatter(Rpoints[0], Rpoints[1])
         #plt.scatter(Lpoints[0], Lpoints[1])
-        lanes = detectDeparture(Lpoints, 400, Rpoints)
-        lanes_detected(lanes)
-        if (Mpoints != 0):
+        plt.scatter(Rpoints_2[0], Rpoints_2[1])
+        plt.scatter(Lpoints_2[0], Lpoints_2[1])
+        #lanes = detectDeparture(Lpoints, 400, Rpoints)
+        lanes_2 = detectDeparture(Lpoints_2, 400, Rpoints_2)
+        #lanes_detected(lanes)
+        lanes_detected(lanes_2)
+        if (Mpoints_2 != 0):
             #plt.scatter(Mpoints[0], Mpoints[1])
-            lane_distance = detectDepartureNew(Mpoints)
-            lane_status(lane_distance)
+            plt.scatter(Mpoints_2[0],Mpoints_2[1])
+            #lane_distance = detectDepartureNew(Mpoints)
+            lane_distance_2 = detectDepartureNew(Mpoints_2)
+            lane_status(lane_distance_2)
 
         else:
             print("No midpoint calculated")
-        # plt.scatter(310, 300)
-
-        #plt.imshow(frame, zorder=0)
+        plt.scatter(300, 350)
 
-        #plt.pause(.001)
+        plt.imshow(frame, zorder=0)
 
-    lane = applyLines(frame, lines)
+        plt.pause(.001)
 
+    #lane = applyLines(frame, lines)
+    #cv2.imshow("thresh",test)
+    #cv2.imshow("cropped_2",cropped_2)
     # cv2.imshow("edges", wEdges)
     # cv2.imshow("cropped", cropped)
     # cv2.imshow("frame", lane)
 
-    # key = cv2.waitKey(1)
-    # if key == 27:
-    #    break
-# video.release()
-# cv2.destroyAllWindows()
+    #key = cv2.waitKey(1)
+    #if key == 27:
+    #    break
+    #video.release()
+    #cv2.destroyAllWindows()
diff --git a/Robot_Development/catkin_ws/src/lane_detection/src/detect_plot.py b/Robot_Development/catkin_ws/src/lane_detection/src/detect_plot.py
new file mode 100755
index 0000000000000000000000000000000000000000..fb0ba974ae24b23614495ef9757799c8991af782
--- /dev/null
+++ b/Robot_Development/catkin_ws/src/lane_detection/src/detect_plot.py
@@ -0,0 +1,314 @@
+#!/usr/bin/python
+
+import numpy as np
+import rospy
+import matplotlib.pyplot as plt
+import cv2
+from imutils.video import WebcamVideoStream
+from imutils.video import FPS
+from std_msgs.msg import String
+from std_msgs.msg import Float64
+
+pub = rospy.Publisher('lanes', String, queue_size=10)
+pub1 = rospy.Publisher('lane_distance', Float64, queue_size=10)
+rospy.init_node('lane_detection')
+rate = rospy.Rate(1000)  # 1000hz
+width = 325
+
+
+# Converts picture to grayscale and applies filter to picture
+# params
+# pic : a numpy array of pixel values to represent a picture
+def formatImg(pic):
+    # Convert picture to gray scale
+    gray = cv2.cvtColor(pic, cv2.COLOR_BGR2GRAY)
+
+    # Apply filter to image
+    img_filter = cv2.GaussianBlur(gray, (5, 5), 0)
+    return img_filter
+
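+# Thresholds, blurs, and edge-detects a frame in one pass; used below in place
+# of formatImg + detect_edge so low-contrast edges are discarded early
+# params
+# pic: a BGR image as a numpy array of pixel values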
+def thresh(pic):
+    grayscaled = cv2.cvtColor(pic, cv2.COLOR_BGR2GRAY) #grayscale
+    retval, th = cv2.threshold(grayscaled, 80, 255, cv2.THRESH_BINARY) #filter out unnecessary edges
+    img_filter = cv2.GaussianBlur(th, (5, 5), 0) #Make the image more cohesive
+    img_edge = cv2.Canny(img_filter, 50, 150) #find the edges
+    return img_edge
+
+# pre-processes and performs edge detection on an image
+# params
+# pic: a numpy array of pixel values for an image in gray scale
+def detect_edge(pic):
+    # Perform canny edge detection
+    img_edge = cv2.Canny(pic, 50, 150)
+
+    # return new edge image
+    return img_edge
+
+# Define the region of interest in which the lanes will be in the camera's view
+# params
+# pic: original image to apply the pre-set region of interest to
+def ROI_real(pic):
+    height = pic.shape[0]
+    width = pic.shape[1]
+    triangle = np.array([[(0, height), (135, 250), (515, 250), (width, height)]], dtype=np.int32) #shape of a trapezoid
+    mask = np.zeros_like(pic)
+    cv2.fillPoly(mask, triangle, 255)
+    roi = cv2.bitwise_and(pic, mask)
+    return roi
+
+
+# Get all possible HoughLines and return them
+# params
+# edge_pic: the image with the edge detection performed
+def getLines(edge_pic):
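+    # rho = 1 px, theta = 1 degree, 50-vote threshold; bridges gaps up to 80 px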
+    return cv2.HoughLinesP(edge_pic, 1, np.pi / 180, 50, maxLineGap=80, minLineLength=10)
+
+
+# Apply the passed-in lines to the original picture
+# params
+# original_pic:
+# lines: Array of lines in the form (x1, x2, y1, y2)
+def applyLines(original_pic, lines):
+    # If there are no lines, return the original photo
+    if lines is None:
+        return original_pic
+
+    # Loop through all possible lines
+    for line in lines:
+        # parse the array to individual variables
+        x1, y1, x2, y2 = line.reshape(4)
+        # print(line)
+
+        # Draw the lines on the original picture
+        cv2.line(original_pic, (x1, y1), (x2, y2), (255, 0, 0), 3)
+
+    # return the original picture with the lines drawn on
+    return original_pic
+
+
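+# Average the two fitted lane lines into a single midline by pairing
+# leftPoints[x] with rightPoints[149 - x] (each line carries 150 samples)
+# params
+# leftPoints, rightPoints: [xs, ys] point arrays from find_poly_lane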
+def find_middle(leftPoints, rightPoints):
+    middle_lines = [[], []]
+
+    if leftPoints[1] == []:
+        print(
+            "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Caught the empty list~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+        return 0
+
+    elif rightPoints[1] == []:
+        print(
+            "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Caught the empty list~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+        return 0
+
+    else:
+        for x in range(150):
+            midPoint = (rightPoints[0][149 - x] + leftPoints[0][x]) / 2
+
+            middle_lines[1].append(leftPoints[1][x])
+            middle_lines[0].append(midPoint)
+
+    return middle_lines
+
+
+# Find the two average lines given the set of lines
+# params
+# pic: the original image
+# lines: An array of lines in the form (x1, x2, y1, y2)
+def find_poly_lane(pic, lines):
+    # Collections for the negative and positive sloped lines
+    left_lines_points = [[], []]  # Negative slope
+    right_lines_points = [[], []]  # Positive slope
+    # print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Start~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+    for line in lines:
+        x1, y1, x2, y2 = line[0]
+        parameters = np.polyfit((x1, x2), (y1, y2), 1)
+        # print("Slope, intercept")
+        # print(parameters)
+        slope = parameters[0]
+        intercept = parameters[1]
+
+        if (slope < .5 and slope > -.5):
+            # print("Line ignored");
+            x = 1
+
+        elif (slope < 0):
+            # print("left insert")
+            left_lines_points[0].append(x1)
+            left_lines_points[0].append(x2)
+            left_lines_points[1].append(y1)
+            left_lines_points[1].append(y2)
+
+        else:
+            # print("right insert")
+            right_lines_points[0].append(x1)
+            right_lines_points[0].append(x2)
+            right_lines_points[1].append(y1)
+            right_lines_points[1].append(y2)
+
+    # print("POINTS")
+    # print(right_lines_points[0])
+    # print(left_lines_points[0])
+
+    if right_lines_points[0]:
+        z = np.polyfit(right_lines_points[0], right_lines_points[1], 1)
+        f = np.poly1d(z)
+        # print(f)
+
+        right_x_new = np.linspace(np.amin(right_lines_points[0]), np.amax(right_lines_points[0]), 150)
+        right_y_new = f(right_x_new)
+
+    else:
+        right_x_new = []
+        right_y_new = []
+
+    if left_lines_points[0]:
+        z = np.polyfit(left_lines_points[0], left_lines_points[1], 1)
+        f = np.poly1d(z)
+        #   print(f)
+
+        left_x_new = np.linspace(np.amin(left_lines_points[0]), np.amax(left_lines_points[0]), 150)
+        left_y_new = f(left_x_new)
+
+    else:
+        left_x_new = []
+        left_y_new = []
+
+    # print("New left")
+    # print(left_x_new)
+    # print(left_y_new)
+
+    return [right_x_new, right_y_new], [left_x_new, left_y_new]
+
+
+def make_coordinates(image, line_parameters):
+    # print(line_parameters)
+    slope, intercept = line_parameters
+    y1 = image.shape[0]
+    y2 = int(y1 * 0.5)  # 0.5, not 1/2, which floor-divides to 0 under Python 2
+    x1 = int((y1 - intercept) / slope)
+    x2 = int((y2 - intercept) / slope)
+    return np.array([x1, y1, x2, y2])
+
+
+def detectDeparture(left, car, right):
+    a = 9999999
+    b = 999999
+    left_lane = 0
+    right_lane = 0
+    missing_lane = ""
+    try:
+        parametersLeft = np.polyfit((left[0][0], left[0][-1]), (left[1][0], left[1][-1]), 1)
+        leftEq = np.poly1d(parametersLeft)
+        leftPosition = (310 - parametersLeft[1]) / parametersLeft[0]
+        a = car - leftPosition
+        left_lane = 1
+    except IndexError:
+        missing_lane = "Left lane does not exist"
+    try:
+        parametersRight = np.polyfit((right[0][0], right[0][-1]), (right[1][0], right[1][-1]), 1)
+        rightEq = np.poly1d(parametersRight)
+        rightPosition = (310 - parametersRight[1]) / parametersRight[0]
+        b = rightPosition - car
+        right_lane = 1
+    except IndexError:
+        missing_lane = "Right lane does not exist"
+    if (left_lane == 0 and right_lane == 0):
+        missing_lane = "No Lanes"
+    return missing_lane
+
+
+def detectDepartureNew(midPoints):
+    midx = 300
+    midy = 350
+    for x in range(150):
+        if (myround(midPoints[1][x]) == midy):
+            # print(midPoints[0][x] - midx)
+            return midPoints[0][x] - midx
+
+
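+# Round to the nearest multiple of 5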
+def myround(x):
+    return int(5 * round(float(x) / 5))
+
+
+def lane_status(lane_distance):
+    rospy.loginfo(lane_distance)
+    pub1.publish(lane_distance)
+    rate.sleep()
+
+
+def lanes_detected(lanes):
+    rospy.loginfo(lanes)
+    pub.publish(lanes)
+    rate.sleep()
+
+
+# video = cv2.VideoCapture("test2.mp4")
+# video = cv2.VideoCapture("highway.mp4")
+
+
+video = WebcamVideoStream(src=1).start()
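+# NOTE: src=1 assumes the lane camera enumerates as the second video device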
+plt.ion()
+
+while not rospy.is_shutdown():
+    frame = video.read()
+    # gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+    # frame = cv2.imread("../frame.jpeg")
+    #frame_edge = detect_edge(frame)
+
+    test = thresh(frame)
+    #new_img = formatImg(frame)
+
+    #wEdges = detect_edge(new_img)
+
+    #cropped = ROI_real(wEdges)
+    cropped_2 = ROI_real(test)
+
+    #lines = getLines(cropped)
+    lines_2 = getLines(cropped_2)
+    if lines_2 is None:
+        x = 0
+
+    else:
+        #Rpoints, Lpoints = find_poly_lane(new_img, lines)
+        Rpoints_2, Lpoints_2 = find_poly_lane(test, lines_2)
+        #Mpoints = find_middle(Lpoints, Rpoints)
+        Mpoints_2 = find_middle(Lpoints_2,Rpoints_2)
+
+        # (type(Rpoints[0][0]))
+        plt.cla()
+        plt.clf()
+
+        #plt.scatter(Rpoints[0], Rpoints[1])
+        #plt.scatter(Lpoints[0], Lpoints[1])
+        plt.scatter(Rpoints_2[0], Rpoints_2[1])
+        plt.scatter(Lpoints_2[0], Lpoints_2[1])
+        #lanes = detectDeparture(Lpoints, 400, Rpoints)
+        lanes_2 = detectDeparture(Lpoints_2, 400, Rpoints_2)
+        #lanes_detected(lanes)
+        lanes_detected(lanes_2)
+        if (Mpoints_2 != 0):
+            #plt.scatter(Mpoints[0], Mpoints[1])
+            plt.scatter(Mpoints_2[0],Mpoints_2[1])
+            #lane_distance = detectDepartureNew(Mpoints)
+            lane_distance_2 = detectDepartureNew(Mpoints_2)
+            lane_status(lane_distance_2)
+
+        else:
+            print("No midpoint calculated")
+        plt.scatter(300, 350)
+
+        plt.imshow(frame, zorder=0)
+
+        plt.pause(.001)
+
+    #lane = applyLines(frame, lines)
+    #cv2.imshow("thresh",test)
+    #cv2.imshow("cropped_2",cropped_2)
+    # cv2.imshow("edges", wEdges)
+    # cv2.imshow("cropped", cropped)
+    # cv2.imshow("frame", lane)
+
+    #key = cv2.waitKey(1)
+    #if key == 27:
+    #    break
+    #video.release()
+    #cv2.destroyAllWindows()
diff --git a/Robot_Development/catkin_ws/src/object_detection/src/object_detect.py b/Robot_Development/catkin_ws/src/object_detection/src/object_detect.py
index fe2dc2748d1731a27bf36f00241933196c516e7a..8641b48085082953a10c569a4e040de4c3d084e2 100755
--- a/Robot_Development/catkin_ws/src/object_detection/src/object_detect.py
+++ b/Robot_Development/catkin_ws/src/object_detection/src/object_detect.py
@@ -50,7 +50,7 @@ def get_detections(fpath):
 		line_count = 0
 		time.sleep(0.05)
 		for row in csv_reader:
-			print(row);
+			# print(row);
 			if(len(row) != 6):
 				print("row length != 6")
 				continue;
diff --git a/Robot_Development/catkin_ws/src/rosjet/jet_control/config/jet_control.yaml b/Robot_Development/catkin_ws/src/rosjet/jet_control/config/jet_control.yaml
index 78828fb5231d3a71c1f8a4371d85db8d1903477f..2dcf021cb359613152f3e9322b3ff5a5951998fb 100644
--- a/Robot_Development/catkin_ws/src/rosjet/jet_control/config/jet_control.yaml
+++ b/Robot_Development/catkin_ws/src/rosjet/jet_control/config/jet_control.yaml
@@ -17,7 +17,7 @@ jet_drive_controller:
     twist_covariance_diagonal: [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
 
     # Wheel separation and radius multipliers
-    wheel_separation: .36
+    wheel_separation: .39
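+    # center-to-center distance in meters; used with wheel_radius for odometry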
     wheel_radius: .077
     wheel_separation_multiplier: 1.0 # default: 1.0
     wheel_radius_multiplier    : 1.0 # default: 1.0