From b0d169623ba3b0c80b050b9f99a3b7035ea3d264 Mon Sep 17 00:00:00 2001
From: kpp55 <kpp55@drexel.edu>
Date: Fri, 8 Mar 2019 16:25:41 -0500
Subject: [PATCH] Successful initial lane demo
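
Tune the lane-following demo for its first successful end-to-end run:

- lane_demo.cpp: lower the correction turn rate from 1.8 to 1.25 rad/s and
  invert the steering direction for the drift-correction cases.
- lane_detection.launch: start the new detect_test.py node, which reads frames
  through a threaded imutils WebcamVideoStream instead of cv2.VideoCapture.
- detect.py: widen the region-of-interest polygons, strip debugging prints, and
  re-enable the cv2.imshow debug windows.
- roi.py: add a small helper for previewing the camera's view when tuning the
  region of interest.
- jet_real.launch: read the joystick from /dev/input/js4.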

---
 .../src/src/lane_demo/src/lane_demo.cpp       |   4 +-
 .../launch/lane_detection.launch              |   2 +-
 .../src/src/lane_detection/src/.detect.py.swp | Bin 0 -> 16384 bytes
 .../lane_detection/src/.detect_test.py.swp    | Bin 0 -> 16384 bytes
 .../src/src/lane_detection/src/detect.py      |  41 +--
 .../src/src/lane_detection/src/detect_test.py | 322 ++++++++++++++++++
 .../src/src/lane_detection/src/roi.py         |  19 ++
 .../rosjet/jet_bringup/launch/jet_real.launch |   2 +-
 8 files changed, 354 insertions(+), 36 deletions(-)
 create mode 100644 Robot_Development/catkin_ws/src/src/lane_detection/src/.detect.py.swp
 create mode 100644 Robot_Development/catkin_ws/src/src/lane_detection/src/.detect_test.py.swp
 create mode 100755 Robot_Development/catkin_ws/src/src/lane_detection/src/detect_test.py
 create mode 100644 Robot_Development/catkin_ws/src/src/lane_detection/src/roi.py

diff --git a/Robot_Development/catkin_ws/src/src/lane_demo/src/lane_demo.cpp b/Robot_Development/catkin_ws/src/src/lane_demo/src/lane_demo.cpp
index 5d8a1f19d..b64cf34d6 100644
--- a/Robot_Development/catkin_ws/src/src/lane_demo/src/lane_demo.cpp
+++ b/Robot_Development/catkin_ws/src/src/lane_demo/src/lane_demo.cpp
@@ -66,7 +66,7 @@ void SenseAndAvoid::chatterCallback(const std_msgs::String::ConstPtr& msg)
 	  ROS_INFO("TURN_RIGHT");
 	  state=TURN_RIGHT;
 	  vel_msg.linear.x = 0;
-	  vel_msg.angular.z = -1.8;
+	  vel_msg.angular.z = 1.25;
 	  vel_pub.publish(vel_msg);
   }
   else if(msg->data.compare("Drift Right") == 0)
@@ -74,7 +74,7 @@ void SenseAndAvoid::chatterCallback(const std_msgs::String::ConstPtr& msg)
 	  ROS_INFO("TURN_LEFT");
 	  state=TURN_LEFT;
 	  vel_msg.linear.x = 0;
-	  vel_msg.angular.z = 1.8;	
+	  vel_msg.angular.z = -1.25;	
 	  vel_pub.publish(vel_msg);
   }
   else
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/launch/lane_detection.launch b/Robot_Development/catkin_ws/src/src/lane_detection/launch/lane_detection.launch
index dd714004f..804a4d2cb 100644
--- a/Robot_Development/catkin_ws/src/src/lane_detection/launch/lane_detection.launch
+++ b/Robot_Development/catkin_ws/src/src/lane_detection/launch/lane_detection.launch
@@ -1,4 +1,4 @@
 <?xml version="1.0"?>
 <launch>
-  <node name="lane_detection" pkg="lane_detection" type="detect.py" output="screen"/>
+  <node name="lane_detection" pkg="lane_detection" type="detect_test.py" output="screen"/>
 </launch>
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect.py.swp b/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect.py.swp
new file mode 100644
index 0000000000000000000000000000000000000000..68ec83c31e134b90da136ba9b0d0512a6cdaa876
GIT binary patch

diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect_test.py.swp b/Robot_Development/catkin_ws/src/src/lane_detection/src/.detect_test.py.swp
new file mode 100644
index 0000000000000000000000000000000000000000..228ef011d303d64fe02942ad0c1157aa02927acb
GIT binary patch

diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/detect.py b/Robot_Development/catkin_ws/src/src/lane_detection/src/detect.py
index dbfdb67a7..ff8af798e 100755
--- a/Robot_Development/catkin_ws/src/src/lane_detection/src/detect.py
+++ b/Robot_Development/catkin_ws/src/src/lane_detection/src/detect.py
@@ -42,7 +42,7 @@ def detect_edge(pic):
 # pic: original image to apply the pre-set region of interest too
 def ROI(pic):
     height = pic.shape[0]
-    triangle = np.array([[(100, height), (600, height), (350, 100)]])
+    triangle = np.array([[(0, height), (650, height), (325, 100)]])
     mask = np.zeros_like(pic)
     cv2.fillPoly(mask, triangle, 255)
     roi = cv2.bitwise_and(pic, mask)
@@ -54,7 +54,7 @@ def ROI(pic):
 # pic: original image to apply the pre-set region of interest too
 def ROI_real(pic):
     height = pic.shape[0]
-    triangle = np.array([[(0, height), (620, height), (430, 250), (150, 250)]])
+    triangle = np.array([[(0, height), (650, height), (0, 0), (650, 0)]])
     mask = np.zeros_like(pic)
     cv2.fillPoly(mask, triangle, 255)
     roi = cv2.bitwise_and(pic, mask)
@@ -81,7 +81,6 @@ def applyLines(original_pic, lines):
     for line in lines:
         # parse the array to individual variables
         x1, y1, x2, y2 = line.reshape(4)
-        # print(line)
 
         # Draw the lines on the original picture
         cv2.line(original_pic, (x1, y1), (x2, y2), (255, 0, 0), 3)
@@ -98,7 +97,7 @@ def find_poly_lane(pic, lines):
     # Collections for the negative and positive sloped lines
     left_lines_points = [[], []]  # Negative slope
     right_lines_points = [[], []]  # Positive slope
-    # print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Start~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+
     for line in lines:
         x1, y1, x2, y2 = line[0]
         parameters = np.polyfit((x1, x2), (y1, y2), 1)
@@ -125,14 +124,9 @@ def find_poly_lane(pic, lines):
             right_lines_points[1].append(y1)
             right_lines_points[1].append(y2)
 
-    # print("POINTS")
-    # print(right_lines_points[0])
-    # print(left_lines_points[0])
-
     if right_lines_points[0]:
         z = np.polyfit(right_lines_points[0], right_lines_points[1], 1)
         f = np.poly1d(z)
-        # print(f)
 
         right_x_new = np.linspace(np.amin(right_lines_points[0]), np.amax(right_lines_points[0]), 150)
         right_y_new = f(right_x_new)
@@ -144,7 +138,6 @@ def find_poly_lane(pic, lines):
     if left_lines_points[0]:
         z = np.polyfit(left_lines_points[0], left_lines_points[1], 1)
         f = np.poly1d(z)
-        #   print(f)
 
         left_x_new = np.linspace(np.amin(left_lines_points[0]), np.amax(left_lines_points[0]), 150)
         left_y_new = f(left_x_new)
@@ -153,10 +146,6 @@ def find_poly_lane(pic, lines):
         left_x_new = []
         left_y_new = []
 
-    # print("New left")
-    # print(left_x_new)
-    # print(left_y_new)
-
     return [right_x_new, right_y_new], [left_x_new, left_y_new]
 
 
@@ -175,8 +164,6 @@ def find_average_lane(pic, lines):
     for line in lines:
         x1, y1, x2, y2 = line[0]
         parameters = np.polyfit((x1, x2), (y1, y2), 1)
-        print("Slope, intercept")
-        print(parameters)
         slope = parameters[0]
         intercept = parameters[1]
 
@@ -203,8 +190,6 @@ def find_average_lane(pic, lines):
     else:
         left_average = np.average(left_lines, axis=0)
         left_line = make_coordinates(pic, left_average)
-        print("Left Line: ")
-        print(left_line)
 
     if not right_lines:
         print("Right is emtpy")
@@ -213,19 +198,11 @@ def find_average_lane(pic, lines):
     else:
         right_average = np.average(right_lines, axis=0)
         right_line = make_coordinates(pic, right_average)
-        print("Right line : ")
-        print(right_line)
-
-    print("Left fit")
-    print(left_line)
 
-    print("\nRight fit")
-    print(right_line)
     return np.array([left_line, right_line])
 
 
 def make_coordinates(image, line_parameters):
-    print(line_parameters)
     slope, intercept = line_parameters
     y1 = image.shape[0]
     y2 = int(y1 * (1 / 2))
@@ -275,6 +252,7 @@ def detectDeparture(left, car, right):
 video = cv2.VideoCapture(1)
 plt.ion()
 
+
 
 def lane_status(direction):
     rospy.loginfo(direction)
@@ -284,7 +262,6 @@ def lane_status(direction):
 
 while not rospy.is_shutdown():
     ret, frame = video.read()
-    print(type(frame))
 
     frame_edge = detect_edge(frame)
 
@@ -292,12 +269,12 @@ while not rospy.is_shutdown():
 
     wEdges = detect_edge(new_img)
 
-    cropped = ROI_real(wEdges)
+    cropped = ROI(wEdges)
 
     lines = getLines(cropped)
 
     if lines is None:
         x = 0
 
     else:
         Rpoints, Lpoints = find_poly_lane(new_img, lines)
@@ -319,13 +296,13 @@ while not rospy.is_shutdown():
     lane = applyLines(frame, lines)
 
     # Display edge detection
-    #cv2.imshow("edges", wEdges)
+    cv2.imshow("edges", wEdges)
 
     # Display cropped edge detection
-    #cv2.imshow("cropped", cropped)
+    cv2.imshow("cropped", cropped)
 
     # Show original image with all lines detected
-    #cv2.imshow("frame", lane)
+    cv2.imshow("frame", lane)
 
     key = cv2.waitKey(25)
     if key == 27:
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/detect_test.py b/Robot_Development/catkin_ws/src/src/lane_detection/src/detect_test.py
new file mode 100755
index 000000000..18052d3f9
--- /dev/null
+++ b/Robot_Development/catkin_ws/src/src/lane_detection/src/detect_test.py
@@ -0,0 +1,322 @@
+#!/usr/bin/python2
+
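+# Lane-detection node: grab frames from a threaded webcam stream, run Canny edge
+# detection and a probabilistic Hough transform inside a region of interest, fit a
+# line to each side of the lane, and publish the departure status on lane_status.
+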
+import numpy as np
+import rospy
+import matplotlib.pyplot as plt
+import cv2
+
+from imutils.video import WebcamVideoStream
+from imutils.video import FPS
+from std_msgs.msg import String
+from std_msgs.msg import Float64
+
+pub = rospy.Publisher('lane_status', String, queue_size=10)
+rospy.init_node('lane_status')
+rate = rospy.Rate(100)  # 100 Hz
+width = 325  # nominal lane width in pixels; detectDeparture uses width / 4 as its drift threshold
+
+# Convert the picture to grayscale and apply a Gaussian blur
+# params
+# pic: a numpy array of pixel values representing a picture
+def formatImg(pic):
+    # Convert picture to gray scale
+    gray = cv2.cvtColor(pic, cv2.COLOR_BGR2GRAY)
+
+    # Apply filter to image
+    img_filter = cv2.GaussianBlur(gray, (5, 5), 0)
+
+    return img_filter
+
+
+# pre-processes and performs edge detection on an image
+# params
+# pic: a numpy array of pixel values for an image in gray scale
+def detect_edge(pic):
+    # Perform canny edge detection
+    img_edge = cv2.Canny(pic, 50, 150)
+
+    # return new edge image
+    return img_edge
+
+
+# Mask the image down to the region of interest in which the lanes appear in the camera's view
+# params
+# pic: original image to apply the pre-set region of interest to
+def ROI(pic):
+    height = pic.shape[0]
+    triangle = np.array([[(0, height), (650, height), (325, 100)]])
+    mask = np.zeros_like(pic)
+    cv2.fillPoly(mask, triangle, 255)
+    roi = cv2.bitwise_and(pic, mask)
+    return roi
+
+
+# Mask the image down to the region of interest in which the lanes appear in the camera's view
+# params
+# pic: original image to apply the pre-set region of interest to
+def ROI_real(pic):
+    height = pic.shape[0]
+    triangle = np.array([[(0, height), (600, height), (475, 300), (175, 300)]])
+    mask = np.zeros_like(pic)
+    cv2.fillPoly(mask, triangle, 255)
+    roi = cv2.bitwise_and(pic, mask)
+    return roi
+
+
+# Get all possible HoughLines and return them
+# params
+# edge_pic: the image with the edge detection performed
+def getLines(edge_pic):
+    return cv2.HoughLinesP(edge_pic, 1, np.pi / 180, 100, maxLineGap=80, minLineLength=20)
+
+
+# Draw the passed-in lines onto the original picture
+# params
+# original_pic: the original image to draw the lines on
+# lines: array of lines in the form (x1, y1, x2, y2)
+def applyLines(original_pic, lines):
+    # If there are no lines, return the original photo
+    if lines is None:
+        return original_pic
+
+
+    # Loop through all possible lines
+    for line in lines:
+
+        # parse the array to individual variables
+        x1, y1, x2, y2 = line.reshape(4)
+
+        # Draw the lines on the original picture
+        cv2.line(original_pic, (x1, y1), (x2, y2), (255, 0, 0), 3)
+
+    # return the original picture with the lines drawn on
+    return original_pic
+
+
+# Fit an average line to each side (left and right) given the set of detected lines
+# params
+# pic: the original image
+# lines: an array of lines in the form (x1, y1, x2, y2)
+def find_poly_lane(pic, lines):
+    # Collections for the negative and positive sloped lines
+    left_lines_points = [[], []]  # Negative slope
+    right_lines_points = [[], []]  # Positive slope
+
+    for line in lines:
+        x1, y1, x2, y2 = line[0]
+        parameters = np.polyfit((x1, x2), (y1, y2), 1)
+        # print("Slope, intercept")
+        # print(parameters)
+        slope = parameters[0]
+        intercept = parameters[1]
+
+        if -1 < slope < 1:
+            # ignore near-horizontal segments
+            pass
+
+        elif (slope < 0):
+            #print("left insert")
+            left_lines_points[0].append(x1)
+            left_lines_points[0].append(x2)
+            left_lines_points[1].append(y1)
+            left_lines_points[1].append(y2)
+
+        else:
+            #print("right insert")
+            right_lines_points[0].append(x1)
+            right_lines_points[0].append(x2)
+            right_lines_points[1].append(y1)
+            right_lines_points[1].append(y2)
+
+    if right_lines_points[0]:
+        z = np.polyfit(right_lines_points[0], right_lines_points[1], 1)
+        f = np.poly1d(z)
+
+        right_x_new = np.linspace(np.amin(right_lines_points[0]), np.amax(right_lines_points[0]), 150)
+        right_y_new = f(right_x_new)
+
+    else:
+        right_x_new = []
+        right_y_new = []
+
+    if left_lines_points[0]:
+        z = np.polyfit(left_lines_points[0], left_lines_points[1], 1)
+        f = np.poly1d(z)
+
+        left_x_new = np.linspace(np.amin(left_lines_points[0]), np.amax(left_lines_points[0]), 150)
+        left_y_new = f(left_x_new)
+
+    else:
+        left_x_new = []
+        left_y_new = []
+
+    return [right_x_new, right_y_new], [left_x_new, left_y_new]
+
+
+# Find the two average lane lines given the set of detected lines
+# params
+# pic: the original image
+# lines: an array of lines in the form (x1, y1, x2, y2)
+# Deprecated: not called in the main loop below (find_poly_lane is used instead)
+def find_average_lane(pic, lines):
+    # Collections for the negative and positive sloped lines
+    left_lines = []  # Negative slope
+    left_lines_points = [[], []]  # Negative slope
+    right_lines = []  # Positive slope
+    right_lines_points = [[], []]  # Positive slope
+
+    for line in lines:
+        x1, y1, x2, y2 = line[0]
+        parameters = np.polyfit((x1, x2), (y1, y2), 1)
+        slope = parameters[0]
+        intercept = parameters[1]
+
+        if (slope < 0):
+            print("Left insert")
+            left_lines.append((slope, intercept))
+            left_lines_points[0].append(x1)
+            left_lines_points[0].append(x2)
+            left_lines_points[1].append(y1)
+            left_lines_points[1].append(y2)
+
+        else:
+            print("Right insert")
+            right_lines.append((slope, intercept))
+            right_lines_points[0].append(x1)
+            right_lines_points[0].append(x2)
+            right_lines_points[1].append(y1)
+            right_lines_points[1].append(y2)
+
+    if not left_lines:
+        print("Left is empty")
+        left_line = [0, 0, 0, 0]
+
+    else:
+        left_average = np.average(left_lines, axis=0)
+        left_line = make_coordinates(pic, left_average)
+
+    if not right_lines:
+        print("Right is emtpy")
+        right_line = [0, 0, 0, 0]
+
+    else:
+        right_average = np.average(right_lines, axis=0)
+        right_line = make_coordinates(pic, right_average)
+
+    return np.array([left_line, right_line])
+
+
+def make_coordinates(image, line_parameters):
+    slope, intercept = line_parameters
+    y1 = image.shape[0]
+    y2 = int(y1 * 0.5)  # half the image height (1 / 2 would floor to 0 under Python 2)
+    x1 = int((y1 - intercept) / slope)
+    x2 = int((y2 - intercept) / slope)
+    return np.array([x1, y1, x2, y2])
+
+
+
+
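+# Decide whether the car is drifting out of its lane. left and right are the fitted
+# point sets returned by find_poly_lane, car is the car's x-position in pixels
+# (350 in the main loop), and each lane line is evaluated at the reference row y = 310.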
+def detectDeparture(left, car, right):
+    a = 9999999  # distance to the left lane line (large sentinel when the line is missing)
+    b = 9999999  # distance to the right lane line (large sentinel when the line is missing)
+
+    try:
+        parametersLeft = np.polyfit((left[0][0], left[0][-1]), (left[1][0], left[1][-1]), 1)
+        leftEq = np.poly1d(parametersLeft)
+        leftPosition = (310 - parametersLeft[1]) / parametersLeft[0]
+        a = car - leftPosition
+        left_lane = 1
+
+    except IndexError:
+        print("Left lane does not exist")
+        left_lane = 0
+    try:
+        parametersRight = np.polyfit((right[0][0], right[0][-1]), (right[1][0], right[1][-1]), 1)
+        rightEq = np.poly1d(parametersRight)
+        rightPosition = (310 - parametersRight[1]) / parametersRight[0]
+        b = rightPosition - car
+        right_lane = 1
+
+    except IndexError:
+        print("Right lane does not exist")
+        right_lane = 0
+    area = width / 4  # drift threshold: a quarter of the nominal lane width
+
+    if left_lane == 0 and right_lane == 0:
+        direction = "No lane detected"
+        print("No lanes detected")
+    elif (area > a):
+        direction = "Drift Left"
+        print("Drift left")
+    elif (area > b):
+        direction = "Drift Right"
+        print("Drift right")
+    else:
+        direction = "On Course"
+        print("On course")
+
+    return direction
+
+def lane_status(direction):
+    rospy.loginfo(direction)
+    pub.publish(direction)
+    rate.sleep()
+
+
+#video = cv2.VideoCapture(1)
+plt.ion()
+
+video = WebcamVideoStream(src=1).start()
+#fps = FPS.start()
+
+
+while not rospy.is_shutdown():
+    frame = video.read()
+
+    frame_edge = detect_edge(frame)
+
+    new_img = formatImg(frame)
+
+    wEdges = detect_edge(new_img)
+
+    cropped = ROI(wEdges)
+
+    lines = getLines(cropped)
+
+    if lines is None:
+        pass  # no lines detected this frame; skip lane fitting
+
+    else:
+        Rpoints, Lpoints = find_poly_lane(new_img, lines)
+
+        #plt.cla()
+        #plt.clf()
+
+        #plt.scatter(Rpoints[0], Rpoints[1])
+        #plt.scatter(Lpoints[0], Lpoints[1])
+
+        #plt.scatter(310, 350)
+
+       # plt.imshow(frame, zorder=0)
+
+        direction = detectDeparture(Lpoints, 350, Rpoints)
+        lane_status(direction)
+       # plt.pause(.001)
+
+    lane = applyLines(frame, lines)
+
+    # Display edge detection
+    #cv2.imshow("edges", wEdges)
+
+    # Display cropped edge detection
+    cv2.imshow("cropped", cropped)
+
+    # Show original image with all lines detected
+    cv2.imshow("frame", lane)
+
+    key = cv2.waitKey(25)
+    if key == 27:
+        break
+video.stop()  # WebcamVideoStream exposes stop(), not release()
+cv2.destroyAllWindows()
diff --git a/Robot_Development/catkin_ws/src/src/lane_detection/src/roi.py b/Robot_Development/catkin_ws/src/src/lane_detection/src/roi.py
new file mode 100644
index 000000000..ea7102918
--- /dev/null
+++ b/Robot_Development/catkin_ws/src/src/lane_detection/src/roi.py
@@ -0,0 +1,19 @@
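+# Preview raw frames from camera 1 with matplotlib, for checking the camera's
+# view when choosing the lane-detection region of interest.
+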
+import matplotlib.pyplot as plt
+import cv2
+
+video = cv2.VideoCapture(1)
+
+plt.ion()  # interactive plotting so the frame display refreshes each loop
+
+while True:
+	ret, frame = video.read()
+
+	key = cv2.waitKey(25)
+	if key == 27:
+		break
+	
+	plt.imshow(frame, zorder=0)
+	plt.pause(.001)
+
+video.release()
+cv2.destroyAllWindows()
diff --git a/Robot_Development/catkin_ws/src/src/rosjet/jet_bringup/launch/jet_real.launch b/Robot_Development/catkin_ws/src/src/rosjet/jet_bringup/launch/jet_real.launch
index be4c37f28..9606d241f 100644
--- a/Robot_Development/catkin_ws/src/src/rosjet/jet_bringup/launch/jet_real.launch
+++ b/Robot_Development/catkin_ws/src/src/rosjet/jet_bringup/launch/jet_real.launch
@@ -4,7 +4,7 @@
   <include file="$(find jet_serial)/launch/jet_serial.launch" />
 
   <node pkg="joy" type="joy_node" name="joy_node">
-    <param name="dev" value="/dev/input/js0" />
+    <param name="dev" value="/dev/input/js4" />
     <param name="deadzone" value="0.3" />
     <param name="autorepeat_rate" value="20" />
   </node>
-- 
GitLab