From 29d9e385bd47f99503cedddb18c83bfd2ed8cdad Mon Sep 17 00:00:00 2001
From: palmpalmpalm <56908742+palmpalmpalm@users.noreply.github.com>
Date: Sun, 30 Jan 2022 18:47:50 +0700
Subject: [PATCH] Add main files

---
 models/__init__.py | 1 +
 models/__pycache__/__init__.cpython-36.pyc | Bin 0 -> 133 bytes
 models/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 125 bytes
 models/__pycache__/__init__.cpython-38.pyc | Bin 0 -> 129 bytes
 models/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 144 bytes
 models/__pycache__/models.cpython-36.pyc | Bin 0 -> 21180 bytes
 models/__pycache__/models.cpython-37.pyc | Bin 0 -> 20841 bytes
 models/__pycache__/models.cpython-38.pyc | Bin 0 -> 20400 bytes
 models/__pycache__/models.cpython-39.pyc | Bin 0 -> 20399 bytes
 models/export.py | 68 +
 models/models.py | 761 ++++++++++
 object_detection.py | 220 +++
 requirements.txt | 28 +
 test1.jpg | Bin 0 -> 15345 bytes
 utils/__init__.py | 1 +
 utils/__pycache__/__init__.cpython-36.pyc | Bin 0 -> 132 bytes
 utils/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 124 bytes
 utils/__pycache__/__init__.cpython-38.pyc | Bin 0 -> 128 bytes
 utils/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 143 bytes
 utils/__pycache__/datasets.cpython-36.pyc | Bin 0 -> 38035 bytes
 utils/__pycache__/datasets.cpython-37.pyc | Bin 0 -> 37889 bytes
 utils/__pycache__/datasets.cpython-38.pyc | Bin 0 -> 35819 bytes
 utils/__pycache__/datasets.cpython-39.pyc | Bin 0 -> 35906 bytes
 utils/__pycache__/general.cpython-36.pyc | Bin 0 -> 14061 bytes
 utils/__pycache__/general.cpython-37.pyc | Bin 0 -> 13998 bytes
 utils/__pycache__/general.cpython-38.pyc | Bin 0 -> 14126 bytes
 utils/__pycache__/general.cpython-39.pyc | Bin 0 -> 13965 bytes
 utils/__pycache__/google_utils.cpython-36.pyc | Bin 0 -> 2938 bytes
 utils/__pycache__/google_utils.cpython-37.pyc | Bin 0 -> 2908 bytes
 utils/__pycache__/google_utils.cpython-38.pyc | Bin 0 -> 2936 bytes
 utils/__pycache__/google_utils.cpython-39.pyc | Bin 0 -> 2959 bytes
 utils/__pycache__/layers.cpython-36.pyc | Bin 0 -> 24381 bytes
 utils/__pycache__/layers.cpython-37.pyc | Bin 0 -> 24304 bytes
 utils/__pycache__/layers.cpython-38.pyc | Bin 0 -> 23229 bytes
 utils/__pycache__/layers.cpython-39.pyc | Bin 0 -> 23244 bytes
 utils/__pycache__/loss.cpython-38.pyc | Bin 0 -> 4845 bytes
 utils/__pycache__/metrics.cpython-36.pyc | Bin 0 -> 3964 bytes
 utils/__pycache__/metrics.cpython-37.pyc | Bin 0 -> 3953 bytes
 utils/__pycache__/metrics.cpython-38.pyc | Bin 0 -> 3875 bytes
 utils/__pycache__/metrics.cpython-39.pyc | Bin 0 -> 3961 bytes
 utils/__pycache__/parse_config.cpython-36.pyc | Bin 0 -> 2758 bytes
 utils/__pycache__/parse_config.cpython-37.pyc | Bin 0 -> 2697 bytes
 utils/__pycache__/parse_config.cpython-38.pyc | Bin 0 -> 2753 bytes
 utils/__pycache__/parse_config.cpython-39.pyc | Bin 0 -> 2674 bytes
 utils/__pycache__/plots.cpython-36.pyc | Bin 0 -> 13922 bytes
 utils/__pycache__/plots.cpython-37.pyc | Bin 0 -> 13856 bytes
 utils/__pycache__/plots.cpython-38.pyc | Bin 0 -> 13760 bytes
 utils/__pycache__/plots.cpython-39.pyc | Bin 0 -> 13726 bytes
 utils/__pycache__/torch_utils.cpython-36.pyc | Bin 0 -> 9176 bytes
 utils/__pycache__/torch_utils.cpython-37.pyc | Bin 0 -> 9143 bytes
 utils/__pycache__/torch_utils.cpython-38.pyc | Bin 0 -> 9216 bytes
 utils/__pycache__/torch_utils.cpython-39.pyc | Bin 0 -> 9193 bytes
 utils/activations.py | 72 +
 utils/autoanchor.py | 152 ++
 utils/datasets.py | 1297 +++++++++++++++++
 utils/general.py | 449 ++++++
 utils/google_utils.py | 120 ++
 utils/layers.py | 534 +++++++
 utils/loss.py | 173 +++
 utils/metrics.py | 140 ++
 utils/parse_config.py | 71 +
 utils/plots.py | 380 +++++
 utils/torch_utils.py | 240 +++
 63 files changed, 4707 insertions(+)
 create mode 100644 models/__init__.py
 create mode 100644 models/__pycache__/__init__.cpython-36.pyc
 create mode 100644 models/__pycache__/__init__.cpython-37.pyc
 create mode 100644 models/__pycache__/__init__.cpython-38.pyc
 create mode 100644 models/__pycache__/__init__.cpython-39.pyc
 create mode 100644 models/__pycache__/models.cpython-36.pyc
 create mode 100644 models/__pycache__/models.cpython-37.pyc
 create mode 100644 models/__pycache__/models.cpython-38.pyc
 create mode 100644 models/__pycache__/models.cpython-39.pyc
 create mode 100644 models/export.py
 create mode 100644 models/models.py
 create mode 100644 object_detection.py
 create mode 100644 requirements.txt
 create mode 100644 test1.jpg
 create mode 100644 utils/__init__.py
 create mode 100644 utils/__pycache__/__init__.cpython-36.pyc
 create mode 100644 utils/__pycache__/__init__.cpython-37.pyc
 create mode 100644 utils/__pycache__/__init__.cpython-38.pyc
 create mode 100644 utils/__pycache__/__init__.cpython-39.pyc
 create mode 100644 utils/__pycache__/datasets.cpython-36.pyc
 create mode 100644 utils/__pycache__/datasets.cpython-37.pyc
 create mode 100644 utils/__pycache__/datasets.cpython-38.pyc
 create mode 100644 utils/__pycache__/datasets.cpython-39.pyc
 create mode 100644 utils/__pycache__/general.cpython-36.pyc
 create mode 100644 utils/__pycache__/general.cpython-37.pyc
 create mode 100644 utils/__pycache__/general.cpython-38.pyc
 create mode 100644 utils/__pycache__/general.cpython-39.pyc
 create mode 100644 utils/__pycache__/google_utils.cpython-36.pyc
 create mode 100644 utils/__pycache__/google_utils.cpython-37.pyc
 create mode 100644 utils/__pycache__/google_utils.cpython-38.pyc
 create mode 100644 utils/__pycache__/google_utils.cpython-39.pyc
 create mode 100644 utils/__pycache__/layers.cpython-36.pyc
 create mode 100644 utils/__pycache__/layers.cpython-37.pyc
 create mode 100644 utils/__pycache__/layers.cpython-38.pyc
 create mode 100644 utils/__pycache__/layers.cpython-39.pyc
 create mode 100644 utils/__pycache__/loss.cpython-38.pyc
 create mode 100644 utils/__pycache__/metrics.cpython-36.pyc
 create mode 100644 utils/__pycache__/metrics.cpython-37.pyc
 create mode 100644 utils/__pycache__/metrics.cpython-38.pyc
 create mode 100644 utils/__pycache__/metrics.cpython-39.pyc
 create mode 100644 utils/__pycache__/parse_config.cpython-36.pyc
 create mode 100644 utils/__pycache__/parse_config.cpython-37.pyc
 create mode 100644 utils/__pycache__/parse_config.cpython-38.pyc
 create mode 100644 utils/__pycache__/parse_config.cpython-39.pyc
 create mode 100644 utils/__pycache__/plots.cpython-36.pyc
 create mode 100644 utils/__pycache__/plots.cpython-37.pyc
 create mode 100644 utils/__pycache__/plots.cpython-38.pyc
 create mode 100644 utils/__pycache__/plots.cpython-39.pyc
 create mode 100644 utils/__pycache__/torch_utils.cpython-36.pyc
 create mode 100644 utils/__pycache__/torch_utils.cpython-37.pyc
 create mode 100644 utils/__pycache__/torch_utils.cpython-38.pyc
 create mode 100644 utils/__pycache__/torch_utils.cpython-39.pyc
 create mode 100644 utils/activations.py
 create mode 100644 utils/autoanchor.py
 create mode 100644 utils/datasets.py
 create mode 100644 utils/general.py
 create mode 100644 utils/google_utils.py
 create mode 100644 utils/layers.py
 create mode 100644 utils/loss.py
 create mode 100644 utils/metrics.py
 create mode 100644 utils/parse_config.py
 create mode 100644 utils/plots.py
 create mode 100644 utils/torch_utils.py

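Note: as rough orientation for the code that follows, the sketch below shows how the Darknet model defined in models/models.py (added further down) is typically constructed and run in eval mode. It is a minimal sketch, not part of the patch; the cfg path and input size are illustrative assumptions (cfg files are not added by this commit).

    import torch
    from models.models import Darknet

    model = Darknet('cfg/yolov4.cfg', img_size=(640, 640))  # assumed cfg path, not in this patch
    model.eval()

    img = torch.zeros((1, 3, 640, 640))  # dummy BCHW input
    with torch.no_grad():
        pred, _ = model(img)  # eval mode returns (inference output, raw yolo-layer outputs)
    print(pred.shape)  # (batch, num_boxes, 5 + num_classes)
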
diff --git a/models/__init__.py b/models/__init__.py
new file mode 100644
index 0000000..d3f5a12
--- /dev/null
+++ b/models/__init__.py
@@ -0,0 +1 @@
+
[GIT binary patch hunks omitted: base85-encoded literals for the committed models/__pycache__/__init__.cpython-36..39.pyc and models/__pycache__/models.cpython-36..39.pyc byte-code files; no human-readable content.]
z%nT_v#eKoom(0cJTTqkA$p0MWY5E$pl?GSCH=-_il&4p<;L9$QM&aL%i{n+aZ$FAw z2H>BrO61~;G1a~oIrN2#DtCI3)<7GU=+?dtWrJLHa6=i|CyqvbH>gF%9DG>=EO`&S zsLXVak$9NLAusNdDFoy8l5_fq(jJW$UDboxJ?)`<2(@4EA~5!l+2?Zt=RA)l@-c)Dqj%Hyz#kZ^Ql{UmMzV9KYR@#r7 zu^&e|W({7szWo?LZQ=bG#<3q&{<3p@-R^$uZ5?K!&VJk)>_=bS>xW)k-yr(_CI!4B zY4_y@A$ubHH&gQbrdYF*5ll6#OX# z%#WX=*uSLU%M{#7!M73kMzvnCeSM|Po#|MYpQ3;zN0L}zn}>Uh3likBnb@`bGYUGa zI&ZQJ01Fnb&RvOve^&lEK@eT|(cmiBH^{^zZ>B8GnU`es3Kr#mQ;eDVw-CDdheP_h#SEP}vI zp}HNWf4aRPXdDWNuMA&_9{ZM*%m(xv!Q=cA0vG23eNo}6bcctf#qAnhqu`q_@En-g zpK5B5<1|;TM)6(zbKvV*V15rTWpLAnGLtwv%I}1<2b1$CT^&TANu+QCNasP6>U&X0 zb-Jeo_z4c*?XgtyqWY-vF{OzU==5`^@5Bj%%Nv#_#=*;QALtG^KZkzdi|;<^ZzW3} z2zGfqkj^G1!jn9Uf#n9}MZDw*3QkhM#DQ!1N=-6=@^BApwIMGqS1HQ?Kl$5&_?+z)jX|=sse6j5^`_A*m5f7>^uOO zQ6k>qH&8q}$AtU>L3D@1BcYSxS&Sq&Dc-qGtGLo30rX6XL|Aks$*Z&;?blFvzPl)r z(tJVCXCcfWmjpnbom~LOykJ(UeB(zS{K`?GH8nd$n-Hn%@J}po?}`IIvZ{Uj0o=8c z@J}JcKN9^(dF$lS-BplhcJksPj!zcV_>Lvrj;}Inxu~98!jeSskeb2&3PYsO%TGhdq>q{D-)E#*1fv{?BP?2GS_5EU z_!z93sb*V)?oewJsW140U`Z|Eet4Kl*Ps;P6EfuA0Vai<&94i;c?0~&>%xz0fZy`E z@LT%f?So#t8jj^EV?a6nbprlzL@-h>)i<7abu-UI8s+OpmhPDely+K9(@dQHG%MAa znUz5i*Z^+ZH$i-B;*R~7-wSG5*%Y`?vQuSHa_eoE2Tk4%zHibuTkuo2==*Yu_V2Oe=2yz+b zhkh({!S$nX{jJZHZDDCHisHqr%NPr_=Piez&I+}nMxEI`?oT=!!2_i!GN$3d(3UGRYso*A6 zUW;zLmS(mw5u)zn3N-07`^*)t12Ay~9RE;>J`N8H;+!oU-%L$PRd2`Lz>oulyR&0s z?RcV=jFzeMQW%D`7tUKp(dnx)n;DGt$Se$jB6%!6oEu5A9;&0s?08h77rhmY=8; zUYm8MTO!n}(kBxpUm!B2HkQ}ZU|>Y>4S0mk%WZ`G8VX4A>=Ip`E@+nmyHwtswS(At zdP_OQC96$muMnMW2Np%okYu4x5M{BvF#=N)oZ$2kP2PyMUK@gWY)`n`okn8<$=Ak0 zn(ECDvtApd&REj5#1$!zv@tr-=>T>}`=E1rRehGlPBKz8^_9b2zcf0P@Ts@H#_BMo z4k+izl*sw?@1!n3u5lyK=Ct5^QWboXeT$*E(dDOA7`aqsWf-EWL89$(oIL`0PZ>76 zMCiR&*~?N<&fF*;P*+)Tq*J_tZl1gWgsnC;X#Wb_i=aNXRJ=6`aVU|EO#4y(P?RSh z%47nLKllnyR7$QK!ECl=g{{Eot4lIPtI6U5wSlia36j6|{3=!`5E!xxFFBJ1rob*`5X*se(zO;Ge}MT#J7KZyo`6J{I=#%S2nm?v)7m6kHs4Kt&;!Qn?>!`lK-G zZAVxo9fYiiIJ)sBKS1apkoigfIAzF}Vb371LLA=C%R-;WIt2e~s~RQeB~OF2_9*Ej z9{Kz!!hjHB*t_5j2;Bn0hAF_1f^WuQD~XF3rG!b|hFgHMN$y4XZ!5CJ?Z72vO%3A^ zQefANpE4Zu0X0!ZH9&T-P_3#Tf}{xMkepd5j^p!Z(n4+-cjfZ9sGqTEl1VM0^JaYB zbsNszd-e!fS+XX|^?JEx;qYA8LkHeeMci}igVxIQ{kN6(9X@jW_R5pRC+@uK@MPuo z{iW0QJ)S>$hqX5_PyQul3dE(skBkn?m22CG8&zrH+wcJCf+Z@&J;a2ZJx~~WI^1{Jyg#t3jVT!y-W>BX$9wJVu#>vXTSJ#OQ9MV% zE(Eyd7~+2i7cNKezv1$!ATPB0hY-P?0jPYgQuZSZ!CF*sNyV23q+~hyFzJFAp=Rjj2bB*h4-TCiI)VSuWh5pO IV~NcF1$yKo7ytkO literal 0 HcmV?d00001 diff --git a/models/export.py b/models/export.py new file mode 100644 index 0000000..28d15cc --- /dev/null +++ b/models/export.py @@ -0,0 +1,68 @@ +import argparse + +import torch + +from utils.google_utils import attempt_download + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default='./yolov4.pt', help='weights path') + parser.add_argument('--img-size', nargs='+', type=int, default=[640, 640], help='image size') + parser.add_argument('--batch-size', type=int, default=1, help='batch size') + opt = parser.parse_args() + opt.img_size *= 2 if len(opt.img_size) == 1 else 1 # expand + print(opt) + + # Input + img = torch.zeros((opt.batch_size, 3, *opt.img_size)) # image size(1,3,320,192) iDetection + + # Load PyTorch model + attempt_download(opt.weights) + model = torch.load(opt.weights, map_location=torch.device('cpu'))['model'].float() + model.eval() + model.model[-1].export = True # set Detect() layer export=True + y = model(img) # dry run + + # TorchScript export + try: + print('\nStarting TorchScript export with torch %s...' 
% torch.__version__) + f = opt.weights.replace('.pt', '.torchscript.pt') # filename + ts = torch.jit.trace(model, img) + ts.save(f) + print('TorchScript export success, saved as %s' % f) + except Exception as e: + print('TorchScript export failure: %s' % e) + + # ONNX export + try: + import onnx + + print('\nStarting ONNX export with onnx %s...' % onnx.__version__) + f = opt.weights.replace('.pt', '.onnx') # filename + model.fuse() # only for ONNX + torch.onnx.export(model, img, f, verbose=False, opset_version=12, input_names=['images'], + output_names=['classes', 'boxes'] if y is None else ['output']) + + # Checks + onnx_model = onnx.load(f) # load onnx model + onnx.checker.check_model(onnx_model) # check onnx model + print(onnx.helper.printable_graph(onnx_model.graph)) # print a human readable model + print('ONNX export success, saved as %s' % f) + except Exception as e: + print('ONNX export failure: %s' % e) + + # CoreML export + try: + import coremltools as ct + + print('\nStarting CoreML export with coremltools %s...' % ct.__version__) + # convert model from torchscript and apply pixel scaling as per detect.py + model = ct.convert(ts, inputs=[ct.ImageType(name='images', shape=img.shape, scale=1 / 255.0, bias=[0, 0, 0])]) + f = opt.weights.replace('.pt', '.mlmodel') # filename + model.save(f) + print('CoreML export success, saved as %s' % f) + except Exception as e: + print('CoreML export failure: %s' % e) + + # Finish + print('\nExport complete. Visualize with https://github.com/lutzroeder/netron.') diff --git a/models/models.py b/models/models.py new file mode 100644 index 0000000..a04b48e --- /dev/null +++ b/models/models.py @@ -0,0 +1,761 @@ +from utils.google_utils import * +from utils.layers import * +from utils.parse_config import * +from utils import torch_utils + +ONNX_EXPORT = False + + +def create_modules(module_defs, img_size, cfg): + # Constructs module list of layer blocks from module configuration in module_defs + + img_size = [img_size] * 2 if isinstance(img_size, int) else img_size # expand if necessary + _ = module_defs.pop(0) # cfg training hyperparams (unused) + output_filters = [3] # input channels + module_list = nn.ModuleList() + routs = [] # list of layers which rout to deeper layers + yolo_index = -1 + + for i, mdef in enumerate(module_defs): + modules = nn.Sequential() + + if mdef['type'] == 'convolutional': + bn = mdef['batch_normalize'] + filters = mdef['filters'] + k = mdef['size'] # kernel size + stride = mdef['stride'] if 'stride' in mdef else (mdef['stride_y'], mdef['stride_x']) + if isinstance(k, int): # single-size conv + modules.add_module('Conv2d', nn.Conv2d(in_channels=output_filters[-1], + out_channels=filters, + kernel_size=k, + stride=stride, + padding=k // 2 if mdef['pad'] else 0, + groups=mdef['groups'] if 'groups' in mdef else 1, + bias=not bn)) + else: # multiple-size conv + modules.add_module('MixConv2d', MixConv2d(in_ch=output_filters[-1], + out_ch=filters, + k=k, + stride=stride, + bias=not bn)) + + if bn: + modules.add_module('BatchNorm2d', nn.BatchNorm2d(filters, momentum=0.03, eps=1E-4)) + else: + routs.append(i) # detection output (goes into yolo layer) + + if mdef['activation'] == 'leaky': # activation study https://github.com/ultralytics/yolov3/issues/441 + modules.add_module('activation', nn.LeakyReLU(0.1, inplace=True)) + elif mdef['activation'] == 'swish': + modules.add_module('activation', Swish()) + elif mdef['activation'] == 'mish': + modules.add_module('activation', Mish()) + elif mdef['activation'] == 'emb': + 
modules.add_module('activation', F.normalize()) + elif mdef['activation'] == 'logistic': + modules.add_module('activation', nn.Sigmoid()) + elif mdef['activation'] == 'silu': + modules.add_module('activation', nn.SiLU()) + + elif mdef['type'] == 'deformableconvolutional': + bn = mdef['batch_normalize'] + filters = mdef['filters'] + k = mdef['size'] # kernel size + stride = mdef['stride'] if 'stride' in mdef else (mdef['stride_y'], mdef['stride_x']) + if isinstance(k, int): # single-size conv + modules.add_module('DeformConv2d', DeformConv2d(output_filters[-1], + filters, + kernel_size=k, + padding=k // 2 if mdef['pad'] else 0, + stride=stride, + bias=not bn, + modulation=True)) + else: # multiple-size conv + modules.add_module('MixConv2d', MixConv2d(in_ch=output_filters[-1], + out_ch=filters, + k=k, + stride=stride, + bias=not bn)) + + if bn: + modules.add_module('BatchNorm2d', nn.BatchNorm2d(filters, momentum=0.03, eps=1E-4)) + else: + routs.append(i) # detection output (goes into yolo layer) + + if mdef['activation'] == 'leaky': # activation study https://github.com/ultralytics/yolov3/issues/441 + modules.add_module('activation', nn.LeakyReLU(0.1, inplace=True)) + elif mdef['activation'] == 'swish': + modules.add_module('activation', Swish()) + elif mdef['activation'] == 'mish': + modules.add_module('activation', Mish()) + elif mdef['activation'] == 'silu': + modules.add_module('activation', nn.SiLU()) + + elif mdef['type'] == 'dropout': + p = mdef['probability'] + modules = nn.Dropout(p) + + elif mdef['type'] == 'avgpool': + modules = GAP() + + elif mdef['type'] == 'silence': + filters = output_filters[-1] + modules = Silence() + + elif mdef['type'] == 'scale_channels': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = ScaleChannel(layers=layers) + + elif mdef['type'] == 'shift_channels': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = ShiftChannel(layers=layers) + + elif mdef['type'] == 'shift_channels_2d': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = ShiftChannel2D(layers=layers) + + elif mdef['type'] == 'control_channels': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = ControlChannel(layers=layers) + + elif mdef['type'] == 'control_channels_2d': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = ControlChannel2D(layers=layers) + + elif mdef['type'] == 'alternate_channels': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] * 2 + routs.extend([i + l if l < 0 else l for l in layers]) + modules = AlternateChannel(layers=layers) + + elif mdef['type'] == 'alternate_channels_2d': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] * 2 + routs.extend([i + l if l < 0 else l for l in layers]) + modules = AlternateChannel2D(layers=layers) + + elif mdef['type'] == 'select_channels': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = 
output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = SelectChannel(layers=layers) + + elif mdef['type'] == 'select_channels_2d': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = SelectChannel2D(layers=layers) + + elif mdef['type'] == 'sam': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = ScaleSpatial(layers=layers) + + elif mdef['type'] == 'BatchNorm2d': + filters = output_filters[-1] + modules = nn.BatchNorm2d(filters, momentum=0.03, eps=1E-4) + if i == 0 and filters == 3: # normalize RGB image + # imagenet mean and var https://pytorch.org/docs/stable/torchvision/models.html#classification + modules.running_mean = torch.tensor([0.485, 0.456, 0.406]) + modules.running_var = torch.tensor([0.0524, 0.0502, 0.0506]) + + elif mdef['type'] == 'maxpool': + k = mdef['size'] # kernel size + stride = mdef['stride'] + maxpool = nn.MaxPool2d(kernel_size=k, stride=stride, padding=(k - 1) // 2) + if k == 2 and stride == 1: # yolov3-tiny + modules.add_module('ZeroPad2d', nn.ZeroPad2d((0, 1, 0, 1))) + modules.add_module('MaxPool2d', maxpool) + else: + modules = maxpool + + elif mdef['type'] == 'local_avgpool': + k = mdef['size'] # kernel size + stride = mdef['stride'] + avgpool = nn.AvgPool2d(kernel_size=k, stride=stride, padding=(k - 1) // 2) + if k == 2 and stride == 1: # yolov3-tiny + modules.add_module('ZeroPad2d', nn.ZeroPad2d((0, 1, 0, 1))) + modules.add_module('AvgPool2d', avgpool) + else: + modules = avgpool + + elif mdef['type'] == 'upsample': + if ONNX_EXPORT: # explicitly state size, avoid scale_factor + g = (yolo_index + 1) * 2 / 32 # gain + modules = nn.Upsample(size=tuple(int(x * g) for x in img_size)) # img_size = (320, 192) + else: + modules = nn.Upsample(scale_factor=mdef['stride']) + + elif mdef['type'] == 'route': # nn.Sequential() placeholder for 'route' layer + layers = mdef['layers'] + filters = sum([output_filters[l + 1 if l > 0 else l] for l in layers]) + routs.extend([i + l if l < 0 else l for l in layers]) + modules = FeatureConcat(layers=layers) + + elif mdef['type'] == 'route2': # nn.Sequential() placeholder for 'route' layer + layers = mdef['layers'] + filters = sum([output_filters[l + 1 if l > 0 else l] for l in layers]) + routs.extend([i + l if l < 0 else l for l in layers]) + modules = FeatureConcat2(layers=layers) + + elif mdef['type'] == 'route3': # nn.Sequential() placeholder for 'route' layer + layers = mdef['layers'] + filters = sum([output_filters[l + 1 if l > 0 else l] for l in layers]) + routs.extend([i + l if l < 0 else l for l in layers]) + modules = FeatureConcat3(layers=layers) + + elif mdef['type'] == 'route_lhalf': # nn.Sequential() placeholder for 'route' layer + layers = mdef['layers'] + filters = sum([output_filters[l + 1 if l > 0 else l] for l in layers])//2 + routs.extend([i + l if l < 0 else l for l in layers]) + modules = FeatureConcat_l(layers=layers) + + elif mdef['type'] == 'shortcut': # nn.Sequential() placeholder for 'shortcut' layer + layers = mdef['from'] + filters = output_filters[-1] + routs.extend([i + l if l < 0 else l for l in layers]) + modules = WeightedFeatureFusion(layers=layers, weight='weights_type' in mdef) + + elif mdef['type'] == 'reorg3d': # yolov3-spp-pan-scale + pass + + elif mdef['type'] == 'reorg': # yolov3-spp-pan-scale + filters = 4 * 
output_filters[-1] + modules.add_module('Reorg', Reorg()) + + elif mdef['type'] == 'dwt': # yolov3-spp-pan-scale + filters = 4 * output_filters[-1] + modules.add_module('DWT', DWT()) + + elif mdef['type'] == 'implicit_add': # yolov3-spp-pan-scale + filters = mdef['filters'] + modules = ImplicitA(channel=filters) + + elif mdef['type'] == 'implicit_mul': # yolov3-spp-pan-scale + filters = mdef['filters'] + modules = ImplicitM(channel=filters) + + elif mdef['type'] == 'implicit_cat': # yolov3-spp-pan-scale + filters = mdef['filters'] + modules = ImplicitC(channel=filters) + + elif mdef['type'] == 'implicit_add_2d': # yolov3-spp-pan-scale + channels = mdef['filters'] + filters = mdef['atoms'] + modules = Implicit2DA(atom=filters, channel=channels) + + elif mdef['type'] == 'implicit_mul_2d': # yolov3-spp-pan-scale + channels = mdef['filters'] + filters = mdef['atoms'] + modules = Implicit2DM(atom=filters, channel=channels) + + elif mdef['type'] == 'implicit_cat_2d': # yolov3-spp-pan-scale + channels = mdef['filters'] + filters = mdef['atoms'] + modules = Implicit2DC(atom=filters, channel=channels) + + elif mdef['type'] == 'yolo': + yolo_index += 1 + stride = [8, 16, 32, 64, 128] # P3, P4, P5, P6, P7 strides + if any(x in cfg for x in ['yolov4-tiny', 'fpn', 'yolov3']): # P5, P4, P3 strides + stride = [32, 16, 8] + layers = mdef['from'] if 'from' in mdef else [] + modules = YOLOLayer(anchors=mdef['anchors'][mdef['mask']], # anchor list + nc=mdef['classes'], # number of classes + img_size=img_size, # (416, 416) + yolo_index=yolo_index, # 0, 1, 2... + layers=layers, # output layers + stride=stride[yolo_index]) + + # Initialize preceding Conv2d() bias (https://arxiv.org/pdf/1708.02002.pdf section 3.3) + try: + j = layers[yolo_index] if 'from' in mdef else -2 + bias_ = module_list[j][0].bias # shape(255,) + bias = bias_[:modules.no * modules.na].view(modules.na, -1) # shape(3,85) + #bias[:, 4] += -4.5 # obj + bias.data[:, 4] += math.log(8 / (640 / stride[yolo_index]) ** 2) # obj (8 objects per 640 image) + bias.data[:, 5:] += math.log(0.6 / (modules.nc - 0.99)) # cls (sigmoid(p) = 1/nc) + module_list[j][0].bias = torch.nn.Parameter(bias_, requires_grad=bias_.requires_grad) + + #j = [-2, -5, -8] + #for sj in j: + # bias_ = module_list[sj][0].bias + # bias = bias_[:modules.no * 1].view(1, -1) + # bias.data[:, 4] += math.log(8 / (640 / stride[yolo_index]) ** 2) + # bias.data[:, 5:] += math.log(0.6 / (modules.nc - 0.99)) + # module_list[sj][0].bias = torch.nn.Parameter(bias_, requires_grad=bias_.requires_grad) + except: + print('WARNING: smart bias initialization failure.') + + elif mdef['type'] == 'jde': + yolo_index += 1 + stride = [8, 16, 32, 64, 128] # P3, P4, P5, P6, P7 strides + if any(x in cfg for x in ['yolov4-tiny', 'fpn', 'yolov3']): # P5, P4, P3 strides + stride = [32, 16, 8] + layers = mdef['from'] if 'from' in mdef else [] + modules = JDELayer(anchors=mdef['anchors'][mdef['mask']], # anchor list + nc=mdef['classes'], # number of classes + img_size=img_size, # (416, 416) + yolo_index=yolo_index, # 0, 1, 2... 
+ layers=layers, # output layers + stride=stride[yolo_index]) + + # Initialize preceding Conv2d() bias (https://arxiv.org/pdf/1708.02002.pdf section 3.3) + try: + j = layers[yolo_index] if 'from' in mdef else -1 + bias_ = module_list[j][0].bias # shape(255,) + bias = bias_[:modules.no * modules.na].view(modules.na, -1) # shape(3,85) + #bias[:, 4] += -4.5 # obj + bias.data[:, 4] += math.log(8 / (640 / stride[yolo_index]) ** 2) # obj (8 objects per 640 image) + bias.data[:, 5:] += math.log(0.6 / (modules.nc - 0.99)) # cls (sigmoid(p) = 1/nc) + module_list[j][0].bias = torch.nn.Parameter(bias_, requires_grad=bias_.requires_grad) + except: + print('WARNING: smart bias initialization failure.') + + else: + print('Warning: Unrecognized Layer Type: ' + mdef['type']) + + # Register module list and number of output filters + module_list.append(modules) + output_filters.append(filters) + + routs_binary = [False] * (i + 1) + for i in routs: + routs_binary[i] = True + return module_list, routs_binary + + +class YOLOLayer(nn.Module): + def __init__(self, anchors, nc, img_size, yolo_index, layers, stride): + super(YOLOLayer, self).__init__() + self.anchors = torch.Tensor(anchors) + self.index = yolo_index # index of this layer in layers + self.layers = layers # model output layer indices + self.stride = stride # layer stride + self.nl = len(layers) # number of output layers (3) + self.na = len(anchors) # number of anchors (3) + self.nc = nc # number of classes (80) + self.no = nc + 5 # number of outputs (85) + self.nx, self.ny, self.ng = 0, 0, 0 # initialize number of x, y gridpoints + self.anchor_vec = self.anchors / self.stride + self.anchor_wh = self.anchor_vec.view(1, self.na, 1, 1, 2) + + if ONNX_EXPORT: + self.training = False + self.create_grids((img_size[1] // stride, img_size[0] // stride)) # number x, y grid points + + def create_grids(self, ng=(13, 13), device='cpu'): + self.nx, self.ny = ng # x and y grid size + self.ng = torch.tensor(ng, dtype=torch.float) + + # build xy offsets + if not self.training: + yv, xv = torch.meshgrid([torch.arange(self.ny, device=device), torch.arange(self.nx, device=device)]) + self.grid = torch.stack((xv, yv), 2).view((1, 1, self.ny, self.nx, 2)).float() + + if self.anchor_vec.device != device: + self.anchor_vec = self.anchor_vec.to(device) + self.anchor_wh = self.anchor_wh.to(device) + + def forward(self, p, out): + ASFF = False # https://arxiv.org/abs/1911.09516 + if ASFF: + i, n = self.index, self.nl # index in layers, number of layers + p = out[self.layers[i]] + bs, _, ny, nx = p.shape # bs, 255, 13, 13 + if (self.nx, self.ny) != (nx, ny): + self.create_grids((nx, ny), p.device) + + # outputs and weights + # w = F.softmax(p[:, -n:], 1) # normalized weights + w = torch.sigmoid(p[:, -n:]) * (2 / n) # sigmoid weights (faster) + # w = w / w.sum(1).unsqueeze(1) # normalize across layer dimension + + # weighted ASFF sum + p = out[self.layers[i]][:, :-n] * w[:, i:i + 1] + for j in range(n): + if j != i: + p += w[:, j:j + 1] * \ + F.interpolate(out[self.layers[j]][:, :-n], size=[ny, nx], mode='bilinear', align_corners=False) + + elif ONNX_EXPORT: + bs = 1 # batch size + else: + bs, _, ny, nx = p.shape # bs, 255, 13, 13 + if (self.nx, self.ny) != (nx, ny): + self.create_grids((nx, ny), p.device) + + # p.view(bs, 255, 13, 13) -- > (bs, 3, 13, 13, 85) # (bs, anchors, grid, grid, classes + xywh) + p = p.view(bs, self.na, self.no, self.ny, self.nx).permute(0, 1, 3, 4, 2).contiguous() # prediction + + if self.training: + return p + + elif ONNX_EXPORT: + # Avoid 
broadcasting for ANE operations + m = self.na * self.nx * self.ny + ng = 1. / self.ng.repeat(m, 1) + grid = self.grid.repeat(1, self.na, 1, 1, 1).view(m, 2) + anchor_wh = self.anchor_wh.repeat(1, 1, self.nx, self.ny, 1).view(m, 2) * ng + + p = p.view(m, self.no) + xy = torch.sigmoid(p[:, 0:2]) + grid # x, y + wh = torch.exp(p[:, 2:4]) * anchor_wh # width, height + p_cls = torch.sigmoid(p[:, 4:5]) if self.nc == 1 else \ + torch.sigmoid(p[:, 5:self.no]) * torch.sigmoid(p[:, 4:5]) # conf + return p_cls, xy * ng, wh + + else: # inference + io = p.sigmoid() + io[..., :2] = (io[..., :2] * 2. - 0.5 + self.grid) + io[..., 2:4] = (io[..., 2:4] * 2) ** 2 * self.anchor_wh + io[..., :4] *= self.stride + #io = p.clone() # inference output + #io[..., :2] = torch.sigmoid(io[..., :2]) + self.grid # xy + #io[..., 2:4] = torch.exp(io[..., 2:4]) * self.anchor_wh # wh yolo method + #io[..., :4] *= self.stride + #torch.sigmoid_(io[..., 4:]) + return io.view(bs, -1, self.no), p # view [1, 3, 13, 13, 85] as [1, 507, 85] + + +class JDELayer(nn.Module): + def __init__(self, anchors, nc, img_size, yolo_index, layers, stride): + super(JDELayer, self).__init__() + self.anchors = torch.Tensor(anchors) + self.index = yolo_index # index of this layer in layers + self.layers = layers # model output layer indices + self.stride = stride # layer stride + self.nl = len(layers) # number of output layers (3) + self.na = len(anchors) # number of anchors (3) + self.nc = nc # number of classes (80) + self.no = nc + 5 # number of outputs (85) + self.nx, self.ny, self.ng = 0, 0, 0 # initialize number of x, y gridpoints + self.anchor_vec = self.anchors / self.stride + self.anchor_wh = self.anchor_vec.view(1, self.na, 1, 1, 2) + + if ONNX_EXPORT: + self.training = False + self.create_grids((img_size[1] // stride, img_size[0] // stride)) # number x, y grid points + + def create_grids(self, ng=(13, 13), device='cpu'): + self.nx, self.ny = ng # x and y grid size + self.ng = torch.tensor(ng, dtype=torch.float) + + # build xy offsets + if not self.training: + yv, xv = torch.meshgrid([torch.arange(self.ny, device=device), torch.arange(self.nx, device=device)]) + self.grid = torch.stack((xv, yv), 2).view((1, 1, self.ny, self.nx, 2)).float() + + if self.anchor_vec.device != device: + self.anchor_vec = self.anchor_vec.to(device) + self.anchor_wh = self.anchor_wh.to(device) + + def forward(self, p, out): + ASFF = False # https://arxiv.org/abs/1911.09516 + if ASFF: + i, n = self.index, self.nl # index in layers, number of layers + p = out[self.layers[i]] + bs, _, ny, nx = p.shape # bs, 255, 13, 13 + if (self.nx, self.ny) != (nx, ny): + self.create_grids((nx, ny), p.device) + + # outputs and weights + # w = F.softmax(p[:, -n:], 1) # normalized weights + w = torch.sigmoid(p[:, -n:]) * (2 / n) # sigmoid weights (faster) + # w = w / w.sum(1).unsqueeze(1) # normalize across layer dimension + + # weighted ASFF sum + p = out[self.layers[i]][:, :-n] * w[:, i:i + 1] + for j in range(n): + if j != i: + p += w[:, j:j + 1] * \ + F.interpolate(out[self.layers[j]][:, :-n], size=[ny, nx], mode='bilinear', align_corners=False) + + elif ONNX_EXPORT: + bs = 1 # batch size + else: + bs, _, ny, nx = p.shape # bs, 255, 13, 13 + if (self.nx, self.ny) != (nx, ny): + self.create_grids((nx, ny), p.device) + + # p.view(bs, 255, 13, 13) -- > (bs, 3, 13, 13, 85) # (bs, anchors, grid, grid, classes + xywh) + p = p.view(bs, self.na, self.no, self.ny, self.nx).permute(0, 1, 3, 4, 2).contiguous() # prediction + + if self.training: + return p + + elif ONNX_EXPORT: + # Avoid 
broadcasting for ANE operations + m = self.na * self.nx * self.ny + ng = 1. / self.ng.repeat(m, 1) + grid = self.grid.repeat(1, self.na, 1, 1, 1).view(m, 2) + anchor_wh = self.anchor_wh.repeat(1, 1, self.nx, self.ny, 1).view(m, 2) * ng + + p = p.view(m, self.no) + xy = torch.sigmoid(p[:, 0:2]) + grid # x, y + wh = torch.exp(p[:, 2:4]) * anchor_wh # width, height + p_cls = torch.sigmoid(p[:, 4:5]) if self.nc == 1 else \ + torch.sigmoid(p[:, 5:self.no]) * torch.sigmoid(p[:, 4:5]) # conf + return p_cls, xy * ng, wh + + else: # inference + #io = p.sigmoid() + #io[..., :2] = (io[..., :2] * 2. - 0.5 + self.grid) + #io[..., 2:4] = (io[..., 2:4] * 2) ** 2 * self.anchor_wh + #io[..., :4] *= self.stride + io = p.clone() # inference output + io[..., :2] = torch.sigmoid(io[..., :2]) * 2. - 0.5 + self.grid # xy + io[..., 2:4] = (torch.sigmoid(io[..., 2:4]) * 2) ** 2 * self.anchor_wh # wh yolo method + io[..., :4] *= self.stride + io[..., 4:] = F.softmax(io[..., 4:]) + return io.view(bs, -1, self.no), p # view [1, 3, 13, 13, 85] as [1, 507, 85] + +class Darknet(nn.Module): + # YOLOv3 object detection model + + def __init__(self, cfg, img_size=(416, 416), verbose=False): + super(Darknet, self).__init__() + + self.module_defs = parse_model_cfg(cfg) + self.module_list, self.routs = create_modules(self.module_defs, img_size, cfg) + self.yolo_layers = get_yolo_layers(self) + # torch_utils.initialize_weights(self) + + # Darknet Header https://github.com/AlexeyAB/darknet/issues/2914#issuecomment-496675346 + self.version = np.array([0, 2, 5], dtype=np.int32) # (int32) version info: major, minor, revision + self.seen = np.array([0], dtype=np.int64) # (int64) number of images seen during training + self.info(verbose) if not ONNX_EXPORT else None # print model description + + def forward(self, x, augment=False, verbose=False): + + if not augment: + return self.forward_once(x) + else: # Augment images (inference and test only) https://github.com/ultralytics/yolov3/issues/931 + img_size = x.shape[-2:] # height, width + s = [0.83, 0.67] # scales + y = [] + for i, xi in enumerate((x, + torch_utils.scale_img(x.flip(3), s[0], same_shape=False), # flip-lr and scale + torch_utils.scale_img(x, s[1], same_shape=False), # scale + )): + # cv2.imwrite('img%g.jpg' % i, 255 * xi[0].numpy().transpose((1, 2, 0))[:, :, ::-1]) + y.append(self.forward_once(xi)[0]) + + y[1][..., :4] /= s[0] # scale + y[1][..., 0] = img_size[1] - y[1][..., 0] # flip lr + y[2][..., :4] /= s[1] # scale + + # for i, yi in enumerate(y): # coco small, medium, large = < 32**2 < 96**2 < + # area = yi[..., 2:4].prod(2)[:, :, None] + # if i == 1: + # yi *= (area < 96. ** 2).float() + # elif i == 2: + # yi *= (area > 32. 
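The augmented-inference merge in Darknet.forward above maps each augmented copy's detections back onto the original image before concatenating them: divide the box coordinates by the scale, and mirror the x centre when the copy was flipped left-right. A reduced sketch with illustrative names and values:

import torch


def deaugment(pred, scale, img_w, flipped_lr=False):
    # pred: (n, no) detections from a scaled (and possibly left-right flipped) copy,
    # with x, y, w, h in the first four columns, mapped back to the original image.
    pred = pred.clone()
    pred[..., :4] /= scale                   # undo the resize
    if flipped_lr:
        pred[..., 0] = img_w - pred[..., 0]  # mirror the x centre back
    return pred


# e.g. one box predicted on the 0.83x, flipped copy of a 1280-pixel-wide image
print(deaugment(torch.tensor([[100.0, 50.0, 20.0, 40.0, 0.9]]), 0.83, 1280, flipped_lr=True))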
** 2).float() + # y[i] = yi + + y = torch.cat(y, 1) + return y, None + + def forward_once(self, x, augment=False, verbose=False): + img_size = x.shape[-2:] # height, width + yolo_out, out = [], [] + if verbose: + print('0', x.shape) + str = '' + + # Augment images (inference and test only) + if augment: # https://github.com/ultralytics/yolov3/issues/931 + nb = x.shape[0] # batch size + s = [0.83, 0.67] # scales + x = torch.cat((x, + torch_utils.scale_img(x.flip(3), s[0]), # flip-lr and scale + torch_utils.scale_img(x, s[1]), # scale + ), 0) + + for i, module in enumerate(self.module_list): + name = module.__class__.__name__ + #print(name) + if name in ['WeightedFeatureFusion', 'FeatureConcat', 'FeatureConcat2', 'FeatureConcat3', 'FeatureConcat_l', 'ScaleChannel', 'ShiftChannel', 'ShiftChannel2D', 'ControlChannel', 'ControlChannel2D', 'AlternateChannel', 'AlternateChannel2D', 'SelectChannel', 'SelectChannel2D', 'ScaleSpatial']: # sum, concat + if verbose: + l = [i - 1] + module.layers # layers + sh = [list(x.shape)] + [list(out[i].shape) for i in module.layers] # shapes + str = ' >> ' + ' + '.join(['layer %g %s' % x for x in zip(l, sh)]) + x = module(x, out) # WeightedFeatureFusion(), FeatureConcat() + elif name in ['ImplicitA', 'ImplicitM', 'ImplicitC', 'Implicit2DA', 'Implicit2DM', 'Implicit2DC']: + x = module() + elif name == 'YOLOLayer': + yolo_out.append(module(x, out)) + elif name == 'JDELayer': + yolo_out.append(module(x, out)) + else: # run module directly, i.e. mtype = 'convolutional', 'upsample', 'maxpool', 'batchnorm2d' etc. + #print(module) + #print(x.shape) + x = module(x) + + out.append(x if self.routs[i] else []) + if verbose: + print('%g/%g %s -' % (i, len(self.module_list), name), list(x.shape), str) + str = '' + + if self.training: # train + return yolo_out + elif ONNX_EXPORT: # export + x = [torch.cat(x, 0) for x in zip(*yolo_out)] + return x[0], torch.cat(x[1:3], 1) # scores, boxes: 3780x80, 3780x4 + else: # inference or test + x, p = zip(*yolo_out) # inference output, training output + x = torch.cat(x, 1) # cat yolo outputs + if augment: # de-augment results + x = torch.split(x, nb, dim=0) + x[1][..., :4] /= s[0] # scale + x[1][..., 0] = img_size[1] - x[1][..., 0] # flip lr + x[2][..., :4] /= s[1] # scale + x = torch.cat(x, 1) + return x, p + + def fuse(self): + # Fuse Conv2d + BatchNorm2d layers throughout model + print('Fusing layers...') + fused_list = nn.ModuleList() + for a in list(self.children())[0]: + if isinstance(a, nn.Sequential): + for i, b in enumerate(a): + if isinstance(b, nn.modules.batchnorm.BatchNorm2d): + # fuse this bn layer with the previous conv2d layer + conv = a[i - 1] + fused = torch_utils.fuse_conv_and_bn(conv, b) + a = nn.Sequential(fused, *list(a.children())[i + 1:]) + break + fused_list.append(a) + self.module_list = fused_list + self.info() if not ONNX_EXPORT else None # yolov3-spp reduced from 225 to 152 layers + + def info(self, verbose=False): + torch_utils.model_info(self, verbose) + + +def get_yolo_layers(model): + return [i for i, m in enumerate(model.module_list) if m.__class__.__name__ in ['YOLOLayer', 'JDELayer']] # [89, 101, 113] + + +def load_darknet_weights(self, weights, cutoff=-1): + # Parses and loads the weights stored in 'weights' + + # Establish cutoffs (load layers between 0 and cutoff. 
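fuse() above delegates the arithmetic to torch_utils.fuse_conv_and_bn. For reference, a self-contained re-derivation of that Conv2d + BatchNorm2d folding (a sketch, not the repository helper) looks like this:

import torch
import torch.nn as nn


def fold_bn_into_conv(conv: nn.Conv2d, bn: nn.BatchNorm2d) -> nn.Conv2d:
    fused = nn.Conv2d(conv.in_channels, conv.out_channels, conv.kernel_size,
                      stride=conv.stride, padding=conv.padding,
                      dilation=conv.dilation, groups=conv.groups, bias=True)
    with torch.no_grad():
        scale = bn.weight / torch.sqrt(bn.running_var + bn.eps)       # gamma / sqrt(var + eps)
        fused.weight.copy_(conv.weight * scale.reshape(-1, 1, 1, 1))
        conv_bias = conv.bias if conv.bias is not None else torch.zeros(conv.out_channels)
        fused.bias.copy_((conv_bias - bn.running_mean) * scale + bn.bias)
    return fused


# sanity check on random data: the fused conv matches conv -> bn in eval mode
conv, bn = nn.Conv2d(3, 8, 3, bias=False), nn.BatchNorm2d(8)
bn.running_mean.normal_()
bn.running_var.uniform_(0.5, 2.0)
bn.eval()
x = torch.randn(1, 3, 16, 16)
print(torch.allclose(fold_bn_into_conv(conv, bn)(x), bn(conv(x)), atol=1e-5))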
if cutoff = -1 all are loaded) + file = Path(weights).name + if file == 'darknet53.conv.74': + cutoff = 75 + elif file == 'yolov3-tiny.conv.15': + cutoff = 15 + + # Read weights file + with open(weights, 'rb') as f: + # Read Header https://github.com/AlexeyAB/darknet/issues/2914#issuecomment-496675346 + self.version = np.fromfile(f, dtype=np.int32, count=3) # (int32) version info: major, minor, revision + self.seen = np.fromfile(f, dtype=np.int64, count=1) # (int64) number of images seen during training + + weights = np.fromfile(f, dtype=np.float32) # the rest are weights + + ptr = 0 + for i, (mdef, module) in enumerate(zip(self.module_defs[:cutoff], self.module_list[:cutoff])): + if mdef['type'] == 'convolutional': + conv = module[0] + if mdef['batch_normalize']: + # Load BN bias, weights, running mean and running variance + bn = module[1] + nb = bn.bias.numel() # number of biases + # Bias + bn.bias.data.copy_(torch.from_numpy(weights[ptr:ptr + nb]).view_as(bn.bias)) + ptr += nb + # Weight + bn.weight.data.copy_(torch.from_numpy(weights[ptr:ptr + nb]).view_as(bn.weight)) + ptr += nb + # Running Mean + bn.running_mean.data.copy_(torch.from_numpy(weights[ptr:ptr + nb]).view_as(bn.running_mean)) + ptr += nb + # Running Var + bn.running_var.data.copy_(torch.from_numpy(weights[ptr:ptr + nb]).view_as(bn.running_var)) + ptr += nb + else: + # Load conv. bias + nb = conv.bias.numel() + conv_b = torch.from_numpy(weights[ptr:ptr + nb]).view_as(conv.bias) + conv.bias.data.copy_(conv_b) + ptr += nb + # Load conv. weights + nw = conv.weight.numel() # number of weights + conv.weight.data.copy_(torch.from_numpy(weights[ptr:ptr + nw]).view_as(conv.weight)) + ptr += nw + + +def save_weights(self, path='model.weights', cutoff=-1): + # Converts a PyTorch model to Darket format (*.pt to *.weights) + # Note: Does not work if model.fuse() is applied + with open(path, 'wb') as f: + # Write Header https://github.com/AlexeyAB/darknet/issues/2914#issuecomment-496675346 + self.version.tofile(f) # (int32) version info: major, minor, revision + self.seen.tofile(f) # (int64) number of images seen during training + + # Iterate through layers + for i, (mdef, module) in enumerate(zip(self.module_defs[:cutoff], self.module_list[:cutoff])): + if mdef['type'] == 'convolutional': + conv_layer = module[0] + # If batch norm, load bn first + if mdef['batch_normalize']: + bn_layer = module[1] + bn_layer.bias.data.cpu().numpy().tofile(f) + bn_layer.weight.data.cpu().numpy().tofile(f) + bn_layer.running_mean.data.cpu().numpy().tofile(f) + bn_layer.running_var.data.cpu().numpy().tofile(f) + # Load conv bias + else: + conv_layer.bias.data.cpu().numpy().tofile(f) + # Load conv weights + conv_layer.weight.data.cpu().numpy().tofile(f) + + +def convert(cfg='cfg/yolov3-spp.cfg', weights='weights/yolov3-spp.weights', saveto='converted.weights'): + # Converts between PyTorch and Darknet format per extension (i.e. 
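load_darknet_weights above walks a flat float32 buffer, so the file layout it assumes is worth stating once. The reader below is a sketch (the weights path is a placeholder, and the per-block parameter order follows the loop above):

import numpy as np


def read_darknet_weights(path):
    # Header: 3 x int32 version (major, minor, revision) + 1 x int64 images-seen counter.
    # Body: raw float32 parameters, stored per convolutional block in the order
    #   [bn.bias, bn.weight, bn.running_mean, bn.running_var, conv.weight]
    # or [conv.bias, conv.weight] when batch_normalize is off.
    with open(path, 'rb') as f:
        version = np.fromfile(f, dtype=np.int32, count=3)
        seen = np.fromfile(f, dtype=np.int64, count=1)
        params = np.fromfile(f, dtype=np.float32)
    return version, seen, params


# version, seen, params = read_darknet_weights('weights/yolov3-spp.weights')  # placeholder path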
*.weights convert to *.pt and vice versa) + # from models import *; convert('cfg/yolov3-spp.cfg', 'weights/yolov3-spp.weights') + + # Initialize model + model = Darknet(cfg) + ckpt = torch.load(weights) # load checkpoint + try: + ckpt['model'] = {k: v for k, v in ckpt['model'].items() if model.state_dict()[k].numel() == v.numel()} + model.load_state_dict(ckpt['model'], strict=False) + save_weights(model, path=saveto, cutoff=-1) + except KeyError as e: + print(e) + +def attempt_download(weights): + # Attempt to download pretrained weights if not found locally + weights = weights.strip() + msg = weights + ' missing, try downloading from https://drive.google.com/open?id=1LezFG5g3BCW6iYaV89B2i64cqEUZD7e0' + + if len(weights) > 0 and not os.path.isfile(weights): + d = {''} + + file = Path(weights).name + if file in d: + r = gdrive_download(id=d[file], name=weights) + else: # download from pjreddie.com + url = 'https://pjreddie.com/media/files/' + file + print('Downloading ' + url) + r = os.system('curl -f ' + url + ' -o ' + weights) + + # Error check + if not (r == 0 and os.path.exists(weights) and os.path.getsize(weights) > 1E6): # weights exist and > 1MB + os.system('rm ' + weights) # remove partial downloads + raise Exception(msg) diff --git a/object_detection.py b/object_detection.py new file mode 100644 index 0000000..792afd0 --- /dev/null +++ b/object_detection.py @@ -0,0 +1,220 @@ + +import time +import cv2 +import torch +# import torch.backends.cudnn as cudnn +from numpy import random +import numpy as np + +from utils.datasets import letterbox +from utils.general import non_max_suppression, scale_coords, xyxy2xywh +from utils.plots import plot_one_box +from utils.torch_utils import select_device, time_synchronized + +from models.models import * + + +# path +CONFIG_PATH = 'config/' +WEIGHTS_PATH = CONFIG_PATH + 'yolor_p6.pt' +NAMES_PATH = CONFIG_PATH + 'coco.names' +DEVICE = "cpu" +CFG_PATH = CONFIG_PATH + 'yolor_p6.cfg' +IMAGE_SIZE = 1280 + +class ObjectDetection: + + def __init__(self): + self.device = select_device(DEVICE) + # half precision only supported on CUDA + self.half = self.device.type != 'cpu' + + # load model + # .cuda() #if you want cuda remove the comment + self.model = Darknet(CFG_PATH, IMAGE_SIZE) + self.model.load_state_dict(torch.load(WEIGHTS_PATH, map_location=self.device)['model']) + self.model.to(DEVICE).eval() + + if self.half: + self.model.half() + + # Get names and colors + self.names = self.load_classes(NAMES_PATH) + self.color = [255, 0, 0] + + def load_classes(self, path): + # Loads *.names file at 'path' + with open(path, 'r') as f: + names = f.read().split('\n') + # filter removes empty strings (such as last line) + return list(filter(None, names)) + + def detect(self, input_image): + + # Run inference + t0 = time.time() + img = torch.zeros((1, 3, IMAGE_SIZE, IMAGE_SIZE), device=self.device) # init img + # run once + _ = self.model(img.half() if self.half else img) if self.device.type != 'cpu' else None + + # Padded resize + img = letterbox(input_image, new_shape=IMAGE_SIZE, auto_size=64)[0] + + # Convert + # BGR to RGB, to 3x416x416 + img = img[:, :, ::-1].transpose(2, 0, 1) + img = np.ascontiguousarray(img) + + print("recieving image with shape {}".format(img.shape)) + + img = torch.from_numpy(img).to(DEVICE) + # uint8 to fp16/32 + img = img.half() if self.half else img.float() + # 0 - 255 to 0.0 - 1.0 + img /= 255.0 + if img.ndimension() == 3: + img = img.unsqueeze(0) + + # Inference + print("Inferencing ...") + pred = self.model(img)[0] + + # Apply NMS 
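Before the NMS step below runs, detect() and get_bbox() have both applied the same image preparation; condensed into one helper for reference (a sketch reusing letterbox from utils.datasets in this patch, with the image size defaulted to 1280 as above):

import numpy as np
import torch

from utils.datasets import letterbox


def preprocess(frame_bgr, img_size=1280, device='cpu', half=False):
    img = letterbox(frame_bgr, new_shape=img_size, auto_size=64)[0]  # pad/resize, keep aspect ratio
    img = img[:, :, ::-1].transpose(2, 0, 1)                         # BGR -> RGB, HWC -> CHW
    img = np.ascontiguousarray(img)
    img = torch.from_numpy(img).to(device)
    img = img.half() if half else img.float()                        # uint8 -> fp16/32
    img /= 255.0                                                     # 0-255 -> 0.0-1.0
    return img.unsqueeze(0)                                          # add the batch dimension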
+ pred = non_max_suppression( + pred, conf_thres=0.4, iou_thres=0.5, classes=None, agnostic=False) + + print("found {} object".format(len(pred))) + + # print string + s = "" + s += '%gx%g ' % img.shape[2:] + + # Process detections + for i, det in enumerate(pred): + if det is not None and len(det): + # Rescale boxes from img_size to im0 size + det[:, :4] = scale_coords( + img.shape[2:], det[:, :4], input_image.shape).round() + + # Print results + for c in det[:, -1].unique(): + n = (det[:, -1] == c).sum() # detections per class + s += '%g %ss, ' % (n, self.names[int(c)]) # add to string + + # Write results + for *xyxy, conf, cls in det: + + # python yolor_example.py + label = '%s %.2f' % (self.names[int(cls)], conf) + plot_one_box(xyxy, input_image, label=label, + color=self.color, line_thickness=3) + + # Print time (inference + NMS)q + print('{}Done. {:.3} s'.format(s, time.time() - t0)) + + return input_image + + def get_bbox(self, input_image): + + # object bbox list + bbox_list = [] + + # Run inference + t0 = time.time() + img = torch.zeros((1, 3, IMAGE_SIZE, IMAGE_SIZE), device=self.device) # init img + # run once + _ = self.model(img.half() if self.half else img) if self.device.type != 'cpu' else None + + # Padded resize + img = letterbox(input_image, new_shape=IMAGE_SIZE, auto_size=64)[0] + + # Convert + # BGR to RGB, to 3x416x416 + img = img[:, :, ::-1].transpose(2, 0, 1) + img = np.ascontiguousarray(img) + + print("recieving image with shape {}".format(img.shape)) + + img = torch.from_numpy(img).to(DEVICE) + # uint8 to fp16/32 + img = img.half() if self.half else img.float() + # 0 - 255 to 0.0 - 1.0 + img /= 255.0 + if img.ndimension() == 3: + img = img.unsqueeze(0) + + # Inference + print("Inferencing ...") + pred = self.model(img)[0] + + # Apply NMS + pred = non_max_suppression( + pred, conf_thres=0.4, iou_thres=0.5, classes=None, agnostic=False) + + print("found {} object".format(len(pred))) + + # print string + s = "" + s += '%gx%g ' % img.shape[2:] + + # Process detections + for i, det in enumerate(pred): + if det is not None and len(det): + # Rescale boxes from img_size to im0 size + det[:, :4] = scale_coords( + img.shape[2:], det[:, :4], input_image.shape).round() + + # Print results + for c in det[:, -1].unique(): + n = (det[:, -1] == c).sum() # detections per class + s += '%g %ss, ' % (n, self.names[int(c)]) # add to string + + # Write results + for *xyxy, conf, cls in det: + temp = [] + for ts in xyxy: + temp.append(ts.item()) + bbox = list(np.array(temp).astype(int)) + bbox.append(self.names[int(cls)]) + bbox_list.append(bbox) + + # Print time (inference + NMS)q + print('{}Done. 
{:.3} s'.format(s, time.time() - t0)) + + return bbox_list + + # format bbox list for mediapipe + def format_bbox(self, bbox_list): + format_bboxs = [] + for bbox in bbox_list: + format_bboxs.append([bbox[4], tuple([bbox[0], bbox[1], bbox[2] - bbox[0], bbox[3] - bbox[1]]), False]) + return format_bboxs + +def main(): + # create model + OD = ObjectDetection() + + # load our input image and grab its spatial dimensions + img = cv2.imread("./test1.jpg") + cv2.imshow('test1', img) + + # preprocess image + npimg = np.array(img) + image = npimg.copy() + image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + + with torch.no_grad(): + # get detected image + res = OD.detect(image) + + # get bboxs of object in images + bboxs = OD.get_bbox(image) + + # show output + image = cv2.cvtColor(res, cv2.COLOR_BGR2RGB) + cv2.imshow('yolor_test1', image) + cv2.waitKey(0) + + +if __name__ == '__main__': + main() diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..583e1ee --- /dev/null +++ b/requirements.txt @@ -0,0 +1,28 @@ +# pip install -qr requirements.txt + +# base ---------------------------------------- +Cython +matplotlib>=3.2.2 +numpy>=1.18.5 +opencv-python>=4.1.2 +Pillow +PyYAML>=5.3.1 +scipy>=1.4.1 +tensorboard>=1.5 +tqdm>=4.41.0 + +# logging ------------------------------------- +# wandb + +# plotting ------------------------------------ +seaborn>=0.11.0 +pandas + +# export -------------------------------------- +# coremltools>=4.1 +# onnx>=1.8.1 +# scikit-learn==0.19.2 # for coreml quantization + +# extras -------------------------------------- +thop # FLOPS computation +pycocotools==2.0.0 # COCO mAP diff --git a/test1.jpg b/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..02e7c3cc07c813f12d0a46af5f3263be9a356786 GIT binary patch literal 15345 zcmd73V|1lW&^Eeb+cqY)oe3tkor!JRP9{z!wrx$EiEZ0EHcsYwzwce^{5`);@3ro= z_O9-#?ykD4udb^7x%{~eK$a5!Ar1fm0RTY0K7h|PKrjFX1{N9?8V(Ky4i*j$4iOn1 z77hUo3j-M)2@@3+5fc>&9SI2?6&)K56CH;bmkd!qzt4C41^Q_iUI`ywKPA%6{e{P+0(TZ8s>Vsd9Vl6i02eUIiBigB_Eu=E)BelSbC z%n={`uzrzY@@Vw*5k5F~^$ePPN8(HV#-LR?WLf0o|9_SOQr0pU)%)VGLKWxs&}dsd zRr565=nkAdfSo_Nx_EqKud|)hml!InJC&2q%D+8kb8@jhF(G>~JCe_z;=2(vyvY(s zj3SG%5nbGU5j-L)JdlVx@^ic9N5_4S6~FK~w9Egnc+ne5RM$h&SHF#Dr?8zA6t=RS z@lpt#{x=7Zk+WIGjGK{{Y+i4_g7u@@y(K>VmAA}ItJ@79J-6JmUt4v<3vp#-u3bb# zqpMB==m$|+62XaHso}5A%txEXmwH$Qb3@IZ9dmcrpMU~izZWN6bgw~v;L(N(erK0& zmroDc@h9N-M@)Uu%{h+sy&unTU6izse2r<5_ts1xp3r~Oki`kp&FEphY!J;2HPN#= z@s)lcz<;-1cZcD1k5#v?WJ@;jI$`{DM&1t zKJx_rdga%*z0Y1~wN)~A%8xYuW!iyL8fR`)`*k!>f+3pV{H z8o>U*)#Sy-P~;7zFPgftf4?igN0t12yUDzH zeoKKJm*|6bTT*4@`I}E?vZu&a(pWD+z>eomgUDE}dtM?h?|`~iy5J*--dFdmTa)zo z=)l6RE9Wn8-xCRv02OhZ(nj+%NZ$Ov#Y@hoFdyZV zy4|i*)AuXV@LQH61y1hxXX2#`^mQq?yDU~Fvr->#mJ;Zx%t^-p3_rrPVQL+cWEk@tQq#khA$>k+= z4^M5UN5|+kMM&2ZL5MTYLw>bqPcOq zpqTfnA;(zR@^(x!j017CN}vU=ir@DaEkDzLvW_y}e0wIm8e%lt*~ud}4mUp3B~F}% z6ZMK0o~)`-z5%o4X`kIQo||qnF<~7&bzEI3?L9i*Q6sGzi(?NH3vy_~M^D|8lmjEb z`urN}m^jvZ=74Xt{`UNCE1}MGx1)Y4^iL2HO~T-TK0C0AR+d~z9kqQDF^gN)+_a+2K<=#xMFDq^m({=m0xGR;yxAAvMv8t5jD`Xz=|4#TB%-r5vyj@e^PBc|X35|os-H)f7S``n zJ`B^F(v&jX#?Ug8Xv`J6^Ytmr+TSeVqE`1PybqDMbBdcAt=o!2AF`>&<+ked+?*+~LCl6V+gT;HUJ#{UqP)ZxMYl<~u zZ36hp^ET=G)_BKqdvm0Msf&K9cD6QH)f0WTcRcIKy{?Jxg;wtX`}^!pHky|JQ{#?D zXYum;B?e2y{;>9qqk?AMVlS7)ks z#B@ik!ptnJX{>#+`;xXhp}rYtQQ)(3Wi>$7)&EQVWU+dInP#iT``*%?Tt`NpC|BsH z?T)LuKgG4h!kN2uGEP)EPJ5VS>oz-%K;|^T^zsMLtRZ8loVAQu4tI@`9Nzz*GyvfC z6Ch-&&wBYHNY>!ZD&6KM)5Np-OT!d_K791--uUmI^;nJuaW|*z7BC~OjYXiioAWE- zq~zwh4}4grlv$8q+t7aEHJd7pmdgJy{6Ehd4Z&HNqXQkHAN!L6j?0f-WW5ePinZfL 
zvMNr}EO55ikpHjl|KGlRfddv~0O%JOfC2*tfdGL0V*>&T07C&sWkyCLCShR}h9DI} zCu0-AAZHR)Q2g#6|Ah?x!3Gd$&`-dzHgdVNV1?Ht@YyYAtn)+n5DhmiQ2kwx747bn zCEAw--j#uY7gy|n&uiU|s?-;6R4PWKj0vMDqe{Je$xrreU_~iksaW?Dkfixm_@TsD zMrr3;kVF@xb9d-zkL>)6oocB~v8a_=JbEYI5Ic0@dna6nt%rk<7>jrkEYLsNOYqeM@)ftb#Imn3_J#;^98+_ZL# zET=SvBvuTcQzX5Y^($na`Gt&*QZ*plgjDUVj<8rOn(_gvHHlaH>J)-)nJ1dQdhhs6 zE2QYeOj}+{%LxKHN&a*`;{zzG(=;@0(TqKK z-Z@IOdhF`*`T5@^m^)0#}-^jFY8O7 z=xI&-9gTv!qriMVApmXQ9uG?xpT2C$+6Ygz)+Vi5!!1oNatNBkV4@DD0moQ6qx8ih z*`(|sL{0I$aL|7qHt9P$;ffMQ!5%%|=o+(g)UF+kLX&v?oRy;`f`w?E{LcI_ADJ-o z`?LC+m!EkS9P{C_F;iFMl@JVHx`vCw!=i55+&S7O0LL{w^hnHoz5E(rdP%y<_pA{0de0r+04h7nqAzAP4a4|pe{*(MvzT8F^jrXGzWcsA%U#BXmuW6Zp1RF5C&_oncjFu-mI?6^oaQF) zQFW4lA*0pwP=Wp{%lqEy4H90WJdV)X)V4#fP{Uyro~$ax;$n_sMM0aPdFy;uG(H^3 zm8VX7oY46}@}_z<`wYwZ=BF|^ zCnUpa<&wwVwg>^fVa*RrU#U%hKh;#O@xMzWeLzBqSjVS|wkM!O>$=icMicGTA1Ilx zFLCleHX8gI`t1=4W={{Su=lYW<~{%J%BV+EzRX1L2xNLNVwq1|=`t2=DBCs7IZfab z(EACf^K@s{FZ_=|P;V4k-N0;|vwb$(ZSSr@$0q>Xxwiz8@h<5lMdgf-kaY0EKJ6d9 z1=rFwD~;icfi9PW-(JK#*}Z+G-V#g<^`it#wtJ#lU;%sJYiefvbcGC--}YU#MRtR} zYYEd@)Z%a1@4nSYub7N>5k-W@Uz?L2L9Tpc>jhslZ+ek3sLd{IM(5Et5bG{D^2BY6 z579+iR#>t=Y|3~SQ5r373?XG^7X(@7i>9W%tc-ykJIawIyToE8 z82E;9+aL8#YDpot(+b7NqnVM$Xjo_f2-4{4rdm|me*{g!v^p4_qt&`@%yyS4W>3|0 zxrR>D`)@xwxoQ?E^I57*Gn;N92ZIyL%dsKLQl{E*aBWwaXdF`)m9WYzJWtzuF0POjoPcqrVHdQ24N2l zi_BFOC@U@Oo3DM$BE}zhNj}M3%G^70M@(Nd6j2@5*rEOi0}HjSJ9)DAP|lM|mG7+z z+tiy(E+g{WvW-6QX;Qt#BZ~~|E1%*>mu+@muC?eh9_3##hR4Hx3M$v3#b4X~5upnm zd7-u&!zVzW1|}=U1}mUm8gyaNHEM@j_HeMt4aG@~(5T+ls8O)uQ;ZPnv8sF;*kA!; zN1X0t;j5MeR4OM;R<2|_L30G)dJm}@h;kO~{N4Y^7|Ag{z8*r79hNT~9(BjflU`I< z-xl~oc`w?pTYPh^#hNK%f$Cv_n z%PJLTCFNGSq7D;=?Ws&kU|1prp;+?L(z~Nu1BnDl@@ISeLlA)z+e-^V?Bo-$F!goL z)1v@DzyT1T5a6Kyo$~+U7NB3}JTfW?Gm|g`8Zjvg87rHjkcfey{eMS1C`jhfO?Wy~aO zr&lZ_X2Q_EPomvEe`kgd{W+`EzV%o0zxi0fBH7FbhU^coC=d+9QG;CuD1J5py}^D^K9S3_AvYypx| z(!bFIvoGVqdDe%h*!lDb*@NOn12JiZ37x3X)qft3{eVT$O7y7)2d#1b*uYm@;7yRR zchZCxAV8n6H z5h+}M4<&|nKFaog6#N#_+E0Krjr>z~bcZyh9^SJ;-e}=Y-Caw!n33GYBotsM5Feog zrSMKDX7J=%v~0oO+?Ix)I40gnx77`5cJmp7t9~J`D@te=84lMUgeOSyxYKK2xT1eh z9`_f&VI%JDh1j1NITzcY*3RYzV^ui8OJ6O2mD=-Why%5gb>0FFR9CDn>}v$uf}N_Q z@uhfIwTR_>|02sduc{dut2$NcdQn0*nPz{C>|8x7YxOMbof|RnaFsG8ihlo3z>LER z$ktOzl~&I+@PAAY6a)+m6ddw@P4Ek9f}hkVjQ05Bss`S;s1`L29>9~Y5pyP zm@o`Nzf`pyg-^)OE>^}irLsrC`<-yeQ@cH;ZPQ2}&|P}B0INq8WvQX0&^V2|0R1hh z(wd%ztogeaJNij&vyu7Slp()dP$FyYA*tPE2Xp56MJOQcI%-bXW@xKw$G{@iFoGN#AMp}J zdimsmab-q{jP&WT>||Up zl#GaX^=~D#%jNEWSIn6Vp3Os#l}Uo_T92 z=zzt%BuU#cg?NtOtUNzsbGx0GF}1d#CJ|Y_?PsU@V+sxT@`K?T7rIDL@v6t}{zf*7 z;1dg3G_}plubrd_ETGp(k^}K%`>X8d*i?>?GdJ}&aU48WaPbrHWtx1*U)LT`NC;R6 zF!29zk6+#qMe(0=6gC7S7IH`+Wl{>rtLvTFxndDfFuKmKpG8ytwd?r5_aPX;ulvw+ z2cR*zAj(uN*x^nKWq8J10-dH)8z!zOylbQ-u&kH8784B|LO+Ax?q^740Av8Rw?Koc zXNu2I6fS1?^_>>c!bgRO`wJyz#Oz`4;kBF{y7-(sCKamc6V*Lgry|8nelLlfliaW)%^VM@NP~80~KOGzh?+? 
z%;s*^2<6Oz5QjKzkq@UVD56y%mb5w^hX>{HkPR|_l;kJCM_Rh@{N{U3<9cB?UVe2c zxKnN{4R~eRBT;YZAC4m>M42%2Yd5k9bIc3G;-NLv|B!ukW!_%oQ}OtK_bP3Bbj)g=CxyR9>VX z8%J8kb_vU-Uh_?+9B&Ue*`I*iA0QgT_Gog3HZzUbW7xT29VuRj;Mwzws&-V#9$bB} z{kI$v<-Z?H7ej)FF!zHp6Ba@Yu}1>aa2HqU2fdeO$Z8MK^u9}HXp#BUGEG1ekW{H@ zC})9ddb$q(4PatLiR8;@(l)axf4)WKg}=Y7Ufn2F!{$KQ4XFH4m1>k^)b()7Qpfv4 zyNJjTJ2azcXiQycYdk)@Si+nLYanXmIMFP->JMrKSGyi`Dh~>Cf0c+sOeAv7?t*&( z9-0QQW1N`2sc-FRP&Co}alZ8yyiu)JYtCHcHA&e&5E+ZjniCEBeY3)C2bN^~zIch~ z+aME|14>M|h7Vsna_aWc1j`&7Zbqd#yaJ;6lTigLiDTtu54m$@!69iFo-37V%yxL} zRlqDfK2$ws%<~JeXdrmfYBhf+^{K* zkT4?)D;hc^~ff6nljd6Hd2NIKzM=$DkDg+bIPBKnP*=287p-luN1+R6~))*}k4 zPuw))v>C6ki8W~2EV6oo&thy)5d!VTRd0Z)&7)DeCc%~Df9cyC2x4-g<&($5pK#9; zeq3&PK_CfMs^SW9bxq)!*0Q(kECZc{%2Xfw=pvSNF!p|EcleUH{sxj#ZfEC&tP zeI->$oE&nsbP!?Bdl&YYp53OR#*Ce>t*utcX2Q4GaMRHl8Y3?vTUQ=^CwFe+xA0&m z&j$NpFMT8no_F_3mb$80hrLAS;Y--Q@k1*^>otvH3;?NF*L)AP#Uohyf=>Z|T(mA> zCtif=8FGsjY^}TwuTS(Z?RTnRdCDi!_zgp{ zdi&Q1qzU;;73w%{eW5|a;R*hW&s!zlTpKrgH>A#pU=j#Au15Gs0|h0}Qgd24qG0b^ zZMJ*1A}1y6PiDcdJ*K!Ms&v)y#~{L} ztCLA>dYx~*h)u81kw*ge%M}^Aqp`a4XY(Oi&wlphz+Gv6lGyXX@@FP1p4r<5eh{dh z6J+)sy5wp$IyKv5C5+GGU``a=qnKT?{4do;Su>XntlD~l zxlp8(US#NSN7c;Ht{GXvAA1I{X_|GF94R zS&9v^K9FSUE>&m354Yae;g$GAftGyy9qH-y$QP|G5%Vy+p8%M^8-jhQYxMxM1?M6O zi%)>~RNx8Qp`f|-9F~|_TB-X7-P7-8*6(Z?ggNX)E)2ARJ+cP^Q(zvhv7$zGtM(Oc z(!+tox*&J7Ch|X`G<(@RCcVc_-qKRx?t8b*U=G$o7Tf|D?4X`f@9n<(B_b{ogmz8f zr0v{j1c1`TEEPRw=BY%j65zWW=F)EeNDZZ3LY`}E+A|tZ(0ZPyANQ{YLgKXY<#;TS zv5S|J=_E z>YSHSQ~<(=C!r-Uq@Kw{(v8)bxFNhdCL7~vDXxPXKU2-T_lTM8;{YfV)PX(JD&R9Z zN9CPh-r>&-lJa7#7WBFd9C)!6IdX3Ti ze&VdRMhxw$*u#ada&A!J+S70RDYpHv$H}*a4`hWfimoP@yOgDyTZccNEkNxv^P$XZ zrdZKEtxGF!1_oP$+n-QgX;S$!AM7Sz(s_H+_mNk2ginbgB#Zq)7gprC zfkOn2C+vJd*P!GIIPKYUKDTj)?rwE)jiNvH^h=YEHc4vPUnYQ%7R;Jks%u0>ch+_* zWtrJ^qmGETqF2w_&L4Ie_-C8WBLvIze1PBWO-~U_dX9&LJrn)XiAcFM<7~BWsJ#M*$t{@`JdjQG|9;48kt}ZY)-dpG5am(jNN@k?5{Z@mEZ$;q(=cpwi<-wEpr1d%{b`#V6j<`zEwIXv1v8 zVL4Q9Y^;E=%e1GauI>f*Bki0g%QsX|l805gWO)$^^>*22VSaLf|FCAJ?f6yJPUGf_ z8&O`3smtnlJI}Bcjy3@tKXF^N?eUBa<`Vc|ZW1PsVp03;Z{;IGWY9(guP2=c-(gRy zQts7`UWV<8MlM*8!A?roiow7u464_bkvOi$j2_{0(~_?lzM?+VjW0FuY8HF_db2YR zVW+Qh4XH{hx9BmOSw6SoCcNsX>sv=nStZrHPDB3_+Bj$LvWIPi+kQKhxv5$vj+pT} z6)<;hQ;p1NHA#C?(mG)fF;)iSkxnxRCdO&Az{r$s>FKhA3NVxYbElM560l-*?-Tb9 z5zRLMI!bZ()2Zl)+yK+L608d37fA@0F8fl%TxA0y0ql`m&0fk7KUW-VGQz<4%q!Za zH3!l$s$}aDn-GAClYT`ha~IteP80du zJ1%|FZL(_{a`H}OgnrMQy1?~qEfvx_urW!Z?XfJ!Y|2=Cqx&dtY!WK6u(?%3Bb zRk6GAzEj2~FrBNF>=W?K;T2yD3K7oy=25Jey`UP=Dr8Rf%nnV_+y)a?Pf_h7dI*4|J)tYTK>YvMFl_Nh+h+ zDgqGAyk2yHk|*=XK%9Nw3tQa~YKYlf(eX;JOl;;fv!%V)x>`IAWGqQfHgL-ZEAhxx zkBoC5GQ1AHYZ=ARYj4RC7@a-h@_>=qy#aQ{M94K*UzX|2Q|vycD*syaOJ$+JQat^O zK+P>;;>_ukYGYNy{FV`Q+&R=b9HspC?YAeZa8grQ2r31HW<6t=O`uQXG6~XFT=i=+ARW+iZE8**cna>7kJtF;-cq%Sc z7Cvt|Vc?lQ8eJTkT$XWkgR`()Pig%a{jp1sUs*PaOl$&4vK65n1d6J}kqGSu;@i8# zkCn8l3o9>x64hK6pM1@x0=h+?lM`sSqz}p;^_I;b(<(3`a^BJ~hxDQQnK#6h@MC;n zKbvjih}I_n+mV3!Z3h}*VZ-znl!t62|3!^lRd|haAY#dvseC_!);BqTDx2-_m4A8Sdda2#&;}=+7 z&s5Dz?fwMp>K$6ZEs#Gtro{e%0Mp!g;bg;g*6LdpS<~s74y7;a>@CceRHzcOK%iah z++(aSSZ`I-UDFg5x~xPtm#`PG7+T-KlVDUAP^W@IzCaO+Sl{_6K_(E@8|pbV%4m}Q zi-Cd601kro1RE$UIJWi&G^34(NHHjs-6RDK)N6>*-UD2O?lj1_7igaJh*3})>LimSQIv4wZ-(v3 z`R19VoA6_--0G-k3VHURs5Q2pGruVOTe!knORkG~n)lIoBwh7dPZNb$Rdx|#$1a19 z4Zp{`+4TfIe`iqB$2FftQs=rzT^X%{ybEd7FzUv7{nj=5f|*2a^QRP2fx*b0tQ1@` zLDU)Vj60Q1F`Yw51C~0zpR*l@R}>p4(_$r}`jZD!+RUJh1`&zTZyqblGeNVE_oUoM zO|`Z}2@CvkieG7W_5pXfqfV@aq?2|aw!+2H0KB%8%|eMmFrn{ay05&;VqD@72!)nO zx(FSa%5ZUIx>579$acqNry1Q2hM`Bn39HZua=Hjo3?ntqg4U!!=!2CXc^Tp`X5Irp z*n*~;kf%31gT7htXc^63^bHoeR$t-IcL10W6FBtu&W5wX!Agt#3=mkUUKCx*Xo3i( 
z>sAr{sP-d1+|ay7`OcBA5(Tfpln;UMn)O0Ckzh-@X+14;iJz11_F48G9z;piKlUnq zU|><&Q7h&GnI0X0ir&qyLzU5Chk=Jm&5t4hPI8Q5TYfY!XqiD9TRv>vAM$1*(iR958d5smrvjOq$;i{Hm#p*ouSG^|=MrTv&<4|!zc|RHb2{y(drz?|f z)us4GIc~}Ck<}6IrswagA{LHXf&^m}ooFv{pQ3dp>ehVAxigfJztRIDxey{P;W{nx zZ&RNDZ4ji#K9)}a(7hJ*ez{RwHz3K!{MW*0-zfuvgO`1l?#BR)fZO`_1g%)$(kXih zV)dl6@I$6J@anG%ow|qRT}^8+<@V4Gf`lP-jm*`UIF#9P3WM~_n*qSQ_0R_+cU9?yo>R=kj7s>M=*w{_Q80{3a_?E z7u44@VH~updlIZ$z9c#f2wLa&AOy@jzw806-=QbY%HkpMmUu;~02dN1mio8+Yb#98 zja^_un*3C)H1uF%em*Fz&{&`*=r&IpgiEv^Xu^%0-4CZyI<+&U04G|pK<^3!JCWX> zO4Un-k&as?HP>~vaYH^kCBC?36v+(M9q8Z(zn<^{p!=-q!q%ZPDRXz@lvM7CME$IN zOlU=E6%SyytUEz&X}~cliR|J>o%H)+h#}bp(dIRp*44Xf^5NK==;kMwf;jHYW17|O zyN!Bqb3E7wv-=1KyxNn=a%;*!WFZ8RrKh{a^p;N8PE5v#iufjKu&2|G4ZziKY^;|q z5;yIb!Q^c}NQ1TEa9dj8rycdd+2@d2UL*?7bk_wAoDqVq`>-&o?aq^uExkSgBY(vq zN~7b*bC+BRWRdKzp`uIvp1N_NpF*woiN2#txe?11JaJ3+wWMs0)ngZkT~RoFZ|J)l z$m5+NP2l#={k@TGog|!F4b#^~1|PYe8O~|a6W)QF(v2gkhyhkkgJvo*6-v)94&V9b&d?P_AKH6uWa&Hzg{(V`pP3q%gFVO#G#HE5ul>^& zPn2GRusNk8jp|IaEKQUGpYL`w#Q2R85n_Xv|DX%cE1b>q9h+ELYS=#itq>MifJhO2 zT5!S*P;W%-Je$U;XSEYweb~Of@dNLF1m07-Bi|=jt$*Ftm&~+6B}yvTe@wX3T)$7) zB@hJcS0pcO03m}V-@j^{2-&;7^GlQS3jQX3joqT*L-|PnX!uUXvy>@difzK52KBwg z@HM{4 z!=7iOu|Qq(>)lSDNUZA4d+N za`_V=CsUKb81|%bmw?Heldq#;h2dG$KiM_yAG?)CIu@79Sw8`CfW5V$SPv?mxAmo@z`Ru!L_b(sU6`A( z)L!4L8873SncyT8ReX)2v%ZVg`h#RPW>3wE2slhal48!FM~1iM4_w9trAOPNy$`^t zd|}J2AwLVG4$jNl%vNy2-7Y5S5=an+PDdMoJ0nCWAHiWm-O$z8EE$zAyb)nL?;G|9 zhGLWNCqQ9uo2ln&haWEhEpTcnsqGwUUBAM-ZU`eaM>8V!b!K6AFqF>4Tld$el{4V z(aL+pM}Xt7W${cpWMT9u;n$C8XLyGb=h6*R5Lx|tC&VJ5mU!KG%kOFXlJQ;Jor>@%zig#$-KvR zWD3O_6q4g@+0x_+w?Kb@Smk|NcBur^ zUcV>FJhXxjx(gH@n+5oIfK|OC>>d?_t|>tw6U1P?^B|aw7Y0|6w474g2;1nIzVtD7 zik^#!KR>J5uZ5{aqO@y&o7QDf2c#4MD(PmI5ia{JBX0KqGe!fPaGgylvg0T4ix8ZVKFN;z1vbT zjMiqM+q8JW-@9}AzMVc3x1Em-RVy9#OwOpE|HV_R%0EX^a3V5s6ANCZK@2U&87;pz zT`3*`f!>j*l)vyd$0(vY|Y z?wK|Q4Jmx(sue^c0*waQFt3nik*28~uBni=AfuZ!Fe>!)2_RS0z_c&Bg+2VHuYq0M zR$I_oA1c=`7ROe{v9E$5^+^Of|-g$H1R+A{Yp_rkT+Vu4eXXhAAm8C&ogSfc6&?V$VZ^W z3E9z4rPT0P!%FsMs%og4M;u1Oqf5a!7uhncXIcR!@DRC8;0Pad@em6DOYTtiGHFNd zBV=rbl}?FmkT_*O1>8oW+QDK(itP#!yk_Jswdhi_;y-!@DXvUPJ_j_o+dpJoAS^~n zx>)jS;@D%i^dvCyf}Wh>;FG=PqY|Sd`a3&%;vi>Zu z?zptK`5h|mvgK!$ei^g^Jh@>Nf}%oA z!*}EQiq=SC>A~KK)8zf?HKCfuF?+Y>?Bt)F&t9TpJKq(4uzW=UJp^ zw#~@(6}CVECU=Qm!YoL4dQ>`O=sMJB#{)L4Xt6{teDqoC#lu!RHVoJdOTjk7-i^b0u)8@K*1ik- zp6J_^UOVYb)!04Bx`H7DMk%E}E9F!Yz3G0%I=H?H-M;?Z1_cQL3iZY1t9`{fK$#Vh zNtlFDmA>K~byr}-33>Isq(Y86*Jv{$q6S9xf%*TZ<_$*htKf)?rRShD!p)aDO_J4# zO~X`0)G-#t#os-b&u7Jc!v+Jnc3@v%$eDEW6JST=l3>S&OHJB3k!Q9W5OV1RPt4&K zb<-gGk*GT+mD=a?+sz4WXJ0cB?C8rZP^3UJtyqn%m)K-`(dI(Tx^>Y2$Hh#`>8egdz@t&+Z zv@W~*ZMk!nZ938Yt1L?UTTrz}CdZ3ZNjJT9F=E?29>)u8NjD?Npl+QqxQLhh0w>yA zD$-TpqaWg}*ebN>Iu)z?tk(c*_#s7M{v5pB>YyUBDiaSDb(+hx*b)NpLH%Mvc?wGB zw!x0V$#+9wSQcS?-6wNr@|N7OmUVR62*LGzy)8uG+S$#ZZ%@jY>j#~)Bgvi7sMtIu za=4heO=#4JJqc{8L+JK$s@fmhcC#`eGZW_SP7G8o90mOqNg%h@C%eRd#_ z$U=Pi_NYdSb_lX{0@bNtu|Ck&|^-RW7s|32;VF9(QIbJcygG%SoZ@ zoc^y2;;aC1cEk3TJ9+vGO3%0}M( zt7XJ5xGt_?YxMbenxS5_1a#iJZ2Q3Lf-kUV5xsm5&7`Gdmc}{I1MDu)SRniAdt)Uz zZyu#GzM*sO&lc)cqB+fjHG1bQvgcqz_1rp|YFP{bovp>tCqQr}@VhVSCm;nvMKx>h zRSKljYuNKgp$bLpHA6Ps&+QSA_afq;Gd!&6f!-fKPDTacEA!)rgOT9EC}44}c>(VP d+#!Y({CA*G>@FBsWOH_qTR4;C2ma^E{{!a6*AoB$ literal 0 HcmV?d00001 diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000..d3f5a12 --- /dev/null +++ b/utils/__init__.py @@ -0,0 +1 @@ + diff --git a/utils/__pycache__/__init__.cpython-36.pyc b/utils/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..3cb2beb5d1d8dbcf92564440399d46e705de7bf5 GIT binary patch literal 132 zcmXr!<>k6`^j-oJ0|UcjAcg}*Aj<)Wi&=m~3PUi1CZpdg`k0!5zt2~0rxF^B^Lj6jA15Erumi4=xl22Do4l?+87VFd9@)x|2tCABOj wH#M&$zbK|MKL<#bmSpA>$Hd2H=4F<|$LkeT-r}&y%}*)KNwouM{|v+o00yfYK>z>% literal 0 HcmV?d00001 diff --git a/utils/__pycache__/__init__.cpython-38.pyc b/utils/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a3ba54c578c3f8afc6192a8e39c7194181f7506a GIT binary patch literal 128 zcmWIL<>g`k0!5zt2~0rxF^Gc<7=auIATDMB5-AM944RC7D;bJF!U*D*s*6>OOKMq6 wZfagheo;(ieh!cI&j){-Y%*!l^kJl@xyv1RYo1apelWGUj{uziF02>P%Q~&?~ literal 0 HcmV?d00001 diff --git a/utils/__pycache__/__init__.cpython-39.pyc b/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e3d610bc34b138608ec97ff38a42627ad3bb063 GIT binary patch literal 143 zcmYe~<>g`kg0u@?5}APXV-N=!FakLaKwQiMBvKfH88jLFRx%WUgb~Cq6KAWKqWq-% z|Owoczk%)Vz|I(vr-a;+XjO%)HE!_;|g7%3B;Zx%nxj NIjMFa(>?<+0|1ZBA`$=q literal 0 HcmV?d00001 diff --git a/utils/__pycache__/datasets.cpython-36.pyc b/utils/__pycache__/datasets.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..456874e1920b031b5cc555affd75f65fab135e22 GIT binary patch literal 38035 zcmeHw3v?XUdEPwsjm7&(5TsTVNf8tUk`P6SGA)wgL!vA}G)YPJM)G!nSpd6O>@M#N zNC2I+9YeNdH?(OxsS_u4T^~1*-Ng0jaS}WAX%r{%aZl{l&BJvjk2CcZnjMw2RC(*-SazGlJIdP^Z^_;w@pL(}cx(37#U0rla^G3LZEt-~yPY9t*coxQIHS&1XUy3)m&o3}yt^<^+EW@|Gfo*#k7W1q zuCRBD;cQ>9u9<54IpdmnY5S9gbBmLC#>n2`-0JMWb)WNn&h5^gXUy!K&eP7gvlp?6 z2My;AXWui1v#-!w+Fv?QI#`-qv$J<0{Z3~B=@W%RxW60s`<(;0Kd@$H5998jGl{!N z+}(q_yPQL~JA}J?ad)?K7*b?oPzT@=kc~vztpL2??r;5`{Uct$&7F0Di zTU4&MdUW3NYVLje_s6mR*0Oe~HkXZ*YK6J1U8^AQinG}Wf?2E^-Y-@!XYKsuB4V{e3{ZSg ziZ32Q&Xq+3iwl>7j>lB7Q1S9!v09liIg68J6kLl zg7{p)bBn7SUu)t(V9tKjP{U~Tg**1oR~HNW3whS!+hgUc>l#ku65u@-4?)u+(86 zBfG^wXm2rQt-4tmK~4lYQyX)hG%9Hm!)F?=H!9ZaPzQq;ow^>wldf~l6oczTvg6Sp z!Z8dYPI1P|CUWR|0l*$ai(X;T4LYi=X&<)(ySO+Pm~N0zRSgOo-#y97IktEwT6YH! z7*@=*%mFiD_Te{R#>|+dZb$l=Z!njCYCD5l5X^M)9)G>~v9=7+hY|4%KecAQd#`D5 zkbKj({Fop0lYZQflq}Eo?KRU+_$kM{+EMQW?lWF%oHt%)coE<7qJAg9(W+Zs#EX^U zYlh!3YGC-Rg@}5CZ&fVE_S17{q27fSVhgrw zh-a3%oaoh9y~pqIJ7+BnaeQh#5ybOLbBkChfjz%c3nHp8;|1xN{LFla4P2mxVr8ye z$jy}9AjXC76au@JcThNAS^(v?n+fH6qFmp-TY#$Tu?!( zR;=U}3yW2?5~OCT<#OID`~v@%51R$_#qnN zlM71SjfTg26>$VWYjMt9RRv!Gth;C=?j7#wGCf+3Q}J$Mk%_E7DdrBA_|w&>lvUwCK2qrOdwmUWc#4%K%+(}3M6VH zjU{40qDC$*1lpPkIzvRIaRBgO)Lp6pJtcBE;(}Z*h*hUcM1~!?9N4G4=P4w1<<1K) z7UB-|C^{f@yaKZmz#=LSBm~+MaSP?yAd**eK${C!cm)Yg=72}@uA9rXR=Ro&tz72& zkm4C;7pTta)*G?XM5Axb!64ct2Ov}@jR>fYa3{jbfDf@X@Uoi%N8_e?obe5i6wl>= zPII|`h&*8_40CmK(+)_n=JWY%!V3XNIHcu_C-jcY6nOm}a7z|?1WG0RQ5Wf4gF}jf ze9RT3gFyr=ji)Z6nQ=>c7?{Nkom3Z)`$j$}g1`WEzS$VbTwqEumlXgP<|UQOEmoZ+ z5V2g4&gCvG<;!6X-rT9qU^X}h>h%oxoS-wDr

z+?Xp48V2xnEJV&pK)r^DOV%V2 ziCF1q!j@DHNb`@QbR0i7gP?sy$!ap%R}-nSI*($j4`xCbYb+m%0B&ZsstT72O0S<2 zfT{ard2lshA(6sn_JkG3^}^xP`Nu1N0xKy*uw;x}b3RNBAA0mP&RGv6GD7b}y#{~` zuU`u{hfaZxQPf->(D1otp2l+@5{|hA5mpf4@_7+Bm8rSPE@%+2%+*agA=5uK9#c=F zqW%dwpZgg4s( zQUpqjIB_Q-C^G4!h$5X1Cyi^|>BJ;=2dNh2x&CI%%k>Oeo00cIg!4lzcM?ArvNjer z*hnlYuzV5XxKhNAFz%RsWFe*wc~LNmF+W0(x@sXOS~(--z-~fN$?AMbNuX3@4P~TM zM$(c}s^UlHSTpUVoTwie0#E-IQ=@o4N(reRa}q@O;a%M8@MGZnJN)>XX&LoIJqdQx z$fbSY;9Msd-uN)FZWkEcq{OuBkVH*QT?RGjABG*he;|)4#KjIB}!?SkSOQMy|sleQOkysXN?2>YemkX=*-if`d zBd5=wf9(8ynO$zisetT&6K&_4XEsG#6K!eAle=oxg;d#}nIMLlv1&LJ)LZE;<`tL6w=i0X-}t z?L+v5y*~rFm)ZnZgtyg4r>Yr!pQ)-0nk-a_%$6!nrdYweOk^%B)oN7*l>%`b$19Up zh5J&v5~nVuYcV=L0+H5JiZiA;vp&Qi%S^jkW$qi#Txp9H5#gqCD}$*W3KZDmQTG8^az=Vr_KIp|BO6o2xSl^|IuT**nF0(&N3 z3%X9YZb7lp(@Is9T43iY2*y*I=(3OM3WH??fmxGBs48xwWmS z6hI6!jenF))8;5+RvNs27bF14s%8rRhHy=oJ-F{NSKFkiCKxqAW*rAXtOJAss^D`u zSyM81R2+zSwK?-3Ncj`&n?}m98=fc#x%ot*wK#r)+TMWYV`1}0r9qEpy@e_TppTLP zWdh@58T(pz$NQ@bwuKsU32?)&1Lq;WgIK&zDxk# zCa?NpMctIQ?8smRk&@c@lNTouX%Xe(8nY0+5)_AQ9k6s7)nMQ&Xzw#$=5F07&6 z#Dd9k!9(lQ)#bM3m4xE4;?}Be0Sl@Y^yb}}YQ-zgEmfD?yi)m<@n~yV^2uKAa=r{6 zWO43*+R2WsM{vSUf=kWiK8Z|`j<^sbW*W;chUFK*FNKttmBzITOL9+yDFf!HJ!J1T zQ`Uy1NS+sLCdZ6_ypp$&g|QbAy%eFi5oIHmr7cp8U5X+NbQcPwmPDhMYC`=0DynxQ z2+~gw%bv{Fyd_nTQC9ad-GoImmC>D-0T;!pA)A;295+EKOQOXhYBq`kRpPz~g7moIZcyZ1yzdND&+V8H-+F&>BP;KMwZS@pCDJ8dw4eGZ7W= z5bC2}`Q=a_vX6ifyn#K{#*mnTO>P1!B$v}4r0TKdYY68Ob zhJiFa8;DhFvO$EPO8^|py5lu2WGdmjR$+9SO=upqd7O5rWaF2R<+_ zX`H?tW3}3S5~x_^Gs4<{UaY3B2pCUb!)CQZ$3Y+N%c!@aI40_C`to*M)UynPKOWFj zLGV-jO1|j5rmzBis&X`b`bQ`j^g0FbcGZ;=5d>x>E4&}M#?6qvdOu42 z1%4sp1ttp^Jz+*{V0hqoDqDdrfCq{HV|E|HKD)0C+c%*w#P&pn{Nt5BfkFbVZ2E;P zmn^TFKszzx2x_jwjLoMsG2@S+nEEFOpip>m%qU8a)_RthEb1Rc?XTgd)f;B26G-tk z#nc)6ntwK8$MO5lE03jFcLZ|gSmU*-u^U_$D5cRXrt;F%F7Ao@m|=$vyUqBzG7RXllr)r*MzAKp#4uHA+c8 zderbn5Zi(U9G|nOWVmYA)976%g>FBk1!Qo!>Cz|>&!yUv267>2195CEjfqyb3#y9* z+vCVM-9TM2eN!UG1g2_|e)2N} zY@%m3gFr;;5pJMrqK0XLAWib7g=r1-mKMpP0D)pHl$mcTaNgEf2oRU|v_Vjl)NCd% zuXYOCOF?pCVj`o-OdHx-?LCvnMyZqWs+nr7K=JgtgZ^KHv>)@J$OXoE0{218$zvm{ zvbrtfF3rpoT$c>>GWcrfyLU}Y&SuV>yD&bnZa_$Q`tiGAKrpJ%v;%>g;OnhWyI=ta z2nHpl01<8?HmDiMdAASGI&>lHmlFAr)&Mnxxkw(GkJXtcPn@4RJN4jwnN!$ZFDPsQ zX7sAdNYl`iU~6>-^7sU#0EL0@AOw^%;2K0eugpMe50SmRl5vZR#d2PiSMHRWPS}n+ zF`m&JzM$e>^hPt4eE%TAkyV1uQ=&2{7b{Q}aIZypaBdk^mlQ@dNEU>5!Csg~?=7x{ z+7k64R!J1y$IxIwO9aB`svlv-Mn?C^v!^aT6bv?*9L@edbo%Ur4_yq>t>kkT6eZOl zwp7Cg9HbBzGw%s4k@(C4vMR+(O9l1QC?`9A6xE2W26tcNSfkJ>t!LJ;} zJ@)*8f+(89_~EG30|Od`UJ(f4QY=*JMzsP#7T9&7g1LV<@bL$KZ}pjDpv0fSz4{n~ zf5G6>2wEvVW1LPCbYiAj=@qwp4AiTOCK=3Tp z4j4zxgAy5VjDahm=@o7?+Tcc!0s^3UP(j|*JA-fuQL|_U08TDS?votddM;2a5u5~g zLD*_Q&A|;k6T~0_zBN(m@ToZll81r=;zr01CPPO2Bj{#x*f?Z-0l{TE5E0})s*yXp zj~^Zn9R~1O6PsCr&9Es3C7VV^Z8%LYA+018gx^>RGS^Lv_5_ESkG~P+YZhxWlrIlc zA4IJ-X0%f?qZ;yP52fXyA7`nbVDKCR;XU8S*k>6CM@eQGS)M zHN`#;Ai~YP6o8T(WS^b3C#V$=3AzO;-%Um_-xw@x=!qoOdQ6j3{5bpmFA>~ChJK#; z7Z~tWZZ1Rr6mrM-Jgy=z#kVHXU>N%q(`WE&{&gUB96ynAn=q zPM2(8IDBYR|y>uBo;Sff<16w;?EWKvpZHDpA!{GD|cq2|1w}N1( zbvR;pg9|BGZO)rUz1K8WN6=C?v=Y7Emd0kxlQ85W>Z$j>vG;Y_q^px{Nvoq?r-!jB zjXFKt;9;+w-eIHOhrWX!^>L5Zzu;{vZD+jR@Ao@>TMX|O)?q8uqAu7d^JUy+beRfU z4C8)(=~f3dfg*Pxe%qSyE)uJO`kylhN_XOZ0(ry!uyY7JGDg2X;$!4;`~4w=2hgv`HCsJ{ z>z?`+e@lpx2WDv;*SPE9-BzPM>W`KVVm`ON-Gr?L)&XX3E9Zl=A!$SF(nck1cwO2~ ze~Z6$3n6unKgw8r6t=kobKGzn88GUD%hTSZKO(Zs6ZJ8a8TQ8(@cjBVe_QD;m`Q9a z9a;lTtZ%R1;@{#7EEwqjeT>an{-8g0)ToU5+mX)q!W%mS{`Rd#Bk!7h$pAF%D&37A z#uy`I4Zs#|D{R;}bBz)6Q;@IB++pYLr|f#BertV)bNH!f{kHnf`mXx!`t9C57_m`| z>>fYu+`ACNvp(hB>+fO8HEa2={5_|PYu3|$ikTg&kE85Pe+=)i*WXKx`?Z*Qo4>bo 
zgfP~O4`6;qU~wFAV}R%_{g!k!d#*n39d))lw_GK3_m^JnWJChHwSI?xM|+!FjD>hukI&tW{f8aSZNM9& zuo&zr-H+#C*6}_&IVUD&tYLrX--_{im#k1Jf${G4j+KrR!-zz>&)-GslS*9I1N`|1I2-M--DRJGbq`EjcLwvk3X?yH||kW=MFgIVeXxhEBDAfR(cRt zmitg1Fotk^3wp-da)Z#w&A{LcF04ZIHwY++otcn|wqIIBJ=*0q@X zO+4{Ve@m!Bq;6wK*4}IU#I?Bkh|^P_6paT`8&`{K^AY?W^`;!)x6)(Un%>*toqLwb z(MjS!jG|S)%exnIe;3yIkaO^=Ro}Dx2-oJ*r(|Uw@DDs?1KM6gyn*W@;q_5oeNt8A zfE9f$pmfri^e3IWwi@C4#xFe;3KB=B&IsSuWIw@X2pJdl^Du5`i0xp5jaMByR^!eB zL41e#b_2)~`cdJQMFj)~0uo9UwS!n$j7*#DYyL81Z}*{uu(vz4Il!}c5yx1wv4scL zfZRE?u2eRKjBZ4$6u7A#*id0<-KWJ(rdl~a12sHy*2(TL33v1OO}EK36yLoD{|09F zY-es5Zcdr;{z+OYm@lo7EUN-?0Pi2C4BeKyRLv5GAhv2jZ zd+4S;hPi4@?8ok8O2l>d3Qi)ZUqkhEqm3gIH5eCEybYuM=C;v3@bccOHK?n-FK%7J zw;>lTJBRW%d)ll^xSQK%T~dPRO@X=%q?#rgFPQr^K*d^C8rs-^FRINUzP#a;^^2|- zq}JNrw!3AXAx0UiyR@Re45cirzcaK!&djRnqHGnz7UM#pfT)O7tMQ2$+Kb;6M0f&$ z<^qnl0zDCV?R67MkdX!=|lY6uKrQjRM$W{wJ!hG4b+MSi^XBH!-g zI`oQpnGgJl@A!f0Kk#irS@-5|_FU^h*9kwXgZF3d&PZP~u$^C8tY9x5n=$>B!W{P2 zFBf#4PQ1HPt=w6mElnBc8ywo*RF}L=bv852U40iVVB@7&fp)8{8ix31WOfi3&;H`^ z)%1iuf3h?!r-(#_x7y#HxL@1oT)%@o9Nr{{YW%>=2WDz@WS6_k6${wnY_XtlH<#mo zckYW{;NLfoseg`U)aMycUSGY9%2@1R!(a#*tv=19onZ=^Za|{wMEY`dgi8)KK)|$Zx?4#8Y%jtI+U)|l*k78+rjjp#>k`=Ie zD{Yrf<||@Th7Q7-iAaYdt!qu_gCdwo`Qw$;MNmNk5t>B4iyNNY5)V>bap5^Eb2GJf zkT_b-FHSr8`(Ge%u+7zpmy4@(MXpc}2V%;r!zZzDIcoZ?F=P`5XCHiEocP!%eDRAX zG@`N3oH+w^9J{`0KmrV4=0)dAY-8&9d94#2XR*Bqf=+pC?nP~&lO#?<@6iU0NAXUT^0P(~=t;(bm})JsSFtS_ z!}j_TG}&C?p}JViO@jC=Dtm_y;SddV_c$K5P*@2y-cjULRS?UIO*F2qV9TmFX3);Eo?^S>=ieUa_hW0d5L9HHhR0It2&MSkzWg zR)v;HE`B&lRhz~WBZS$&EC=0%<(cx5!!uVnD*#~H$!`8KgI{7_&61#oz?=@OxoMdb z9JJIY`>+rN%pYfIvTtJZJWFKpXSn0>HvO*)8wM~Gv=UJZN^))&B+LwcG%oDH7DEp< z6S$qQ(@vWsqPK*xp_Q^z$fXXG``-z3Otv9nb^`6PHtP&f{ch7FF^zKEDThg-wHGxR z8b5PKr^=*q{T0E~H2+GIn7=GHGZo*}d+XyaT@_^=&; z%n(_KAQsiJYe`ycYdi|o->w{V^1-0IgH$WR} zY#>9V)GC74Lp+31FZIUM>rjI~D0&8E4+zSBDiW~c&tKtflZ2L;V2;SZKy|!Bk`!R|lmCwiQYn0P(vY-zv&c1*Tz_UQ1aR1;MY z_z1i5)a?;%;i!7BEMTh#jU;V`xfutfQQZrfE&^>8kOwq$+*-BOCt#@p0cg$Ahyx2O z)+`0h(E`X5OCT-g*d8s16Ij%d5y-t#Ctgo@Ntl&!GmqPTILg6;6*xeF>j(}d4;!Tp zlt=^ZL~!kdrAUH`5R{ExGbz#N~xB6X#e3xFn%N7HB7_d#I z<_=Hkf=#ng8gSxQBhcYS>fP9rNckPJmY)`VZ>Qg}6-U2)=s0lzJO+d)rff1Qy?$?L z5U43~%~B5|J_N0I6nh&$ihd`0-y7;jBcdm7+@aq2OG4V{^jwW=IPykN+H!g&2m2?8_alGcYJ^+IV)obP_d%;Z=D~KT zG1|^xsHwz$*!nc|rjddJO>>yhDBuBS*+)4G+n|kWD-m%(T-r+D-NF)lBU?S!gzzCk zI8tT-nOj5qb@pl&(A#_-XKbYH`5c>O3&UXx&!L5}wicdiYeAr>tp$M;y~E^;%yQGa zU&i9gNV|n&gFPpZI&7E27K|f;ce~LFe@kdmnSl*H)QVz-pDf*~`yefW^!xpo#T|rs zSc!4Za&$`lA<#t-T`Vpy)ikar*%O(|Ffzx_fC7AhCE_kTITTQaaZWf z3?rxL{2b(1G^Q_?c@er6#AtcBjXh)xOmKTjdrg5r^ zK}VUQZ7ANwnFzCfG5Bedl_JwPYpY&hQ_&^l-y3wmY@ZT3^%Uysd>-`->TenRF9uf_ zyoZ5EmKnyrh#=_G56-m>M$i$uJILW(RVRxI&7$pyI>vy6Puevuz_LWnPeEDK~`JL<#v!@l~9RhCn<2@LuSN+Wh!{-49*~onFEm# zdyHJOO-h>v&wfh(v&gfz{`J^nu>|h-fYaWAJo4z^>0#3y5S;S}5El$9f#PuDPZl5a^+%0Mqu?@m zs+*d23o)WK#BjtVA`7ZmS!!#5gaG$eSfKtiP9t?zIX$2zW-Gd4Rly@nmC*Vva^(JF$Y9x_N zJRwFBGe~a!QM@^h-*>|T<27hUEHJ|NeZ>Of6$^}4EHGZNz<9+1;}r{xS1d4IvB01` z#!FjZhzQVR8^LzT5xsu$DEknIcJJFTzfix5%9`i0#17yFT_A+5UuT)$U?4n|uv0X? 
zcsIE}-IdjcH{tt^d`H$7>dSZp6jA@N@!)kmkxqOCSwFx|K&u266t)(JzrAk!-5Oh{ z-{dR&8Unn+Amfx_@M^LTv3^W{v~5fezRZ{T9@to@-{NZsul^lcS$weVg}*CS7FtOh zDx0Y|Shs2jDY9yY5P4A(3oXgMqzS?=wEF+;K8QN&24qm&TBm(&uC~v9=MSRn{n8Km z#nyvFsqy#Ps6zc7-sz@B73%kqtG>n}uNYQf6fmvX``h7pOH3b1I^-U ztnPo=SgB!6A)8p=V_OQ*zG=;v=%TLkJ!v$d{)oXJGaznIf5L!oDwY!JzcBW*4BpA$ zQw#)c%N7zz)Mi!^>Q9-miGhUrugn-@AZm<1XH3*G|D7>Wj=hgD*^T-O#{QDQ|6m~7 zT7SjZ|71WVpgzba%A23$?O!v3o?sx`lpTy+Wsy%afV)~l!4)^!<1L*Ch!KP8 zM(n$9!JvBh0NGN*_N<{jN6J{r;K|7JGXq|~!$XFHjKT4Dc<>NzzhMl(U<(6y{5I59 z20XB%c*0O@64Z9a;P=~583wY`w}Y|U7|;>f22%vJi)pf|b~AQ6gFOsL_koNBm>%aS z38sGxvF|4WCV2dRXN$1yMRCh3#t463j1kmcLNJ-D?~O6SIH?hd8O**nGp|0P?NxGm zhhS3^@9&j0g1SS{0Q&xNO%c>S*5H=QW=05V0vRu2gP`^Kf}Rer3Bmkiel@-n=|ciR zKg2p?9NayK@SO0=GC{a?(`DOk5Yl9g(S`5a2H{X+`eB2xnfVe0Pwj}6Fv7oAj1XQi zLRfDM@rn_`D@F*Bmoh?72Z=I?vxL~)V1l3~ng2oNKg;0r3~pvk@FIo=>Rz@(UnJ^2 z1UNPSD#mG8sb0-KpkhH)=v8g+y?1#%bwtfI>{0uL9~R7Q%n;v;xuDt z7<`>ix`V+>TM(!RS>quFFKQ~F&a%jR+32@sBcL8;#n&+S1ZzLS*rN=Zw!WI6(SbnA zJU~6h+j9&y&^PWFznauzgFyg{abXZ(WyC1<@4z5nGb24xslSI70qQ))=-ajkcpnEh zfw^j01Z-vt0sgG{Ws8PMz(r2iMw5WYnRpnjK{Y|g7B9;rKwV(1y9qF@>2L8k+>n1W zMXXIM0hrgcT;yxeiu5FY?r$T2rIH-Q(yl`m66!NZ(N>JMm|*aWBjQ#BK27iQ=zQ9y z*)N>oS#bW+Iui^7Xw;0s&J0#uYZgwn$6(Z9<2xlEk*lFrk)KH8iguF~y0lK!;|Sq` z#7olzWPnD^G(eUZtO#Mx2s3LjS@xs2Z+qgbeWBTRwA(4xRt9~r;?pz86Ax9+A7^4ZVQy4VwJPkX;KLVRh!9c#cahnqFnAMhg~z_3yh5$Sd78}4SuvhrQ;cx>cG|$W;!rwhc7kQo?;$p{f282-f(T$pTYu_Hi2jX zCzs(25%#L}6!JSH2XlfP8wTxmnAg$FOWcr`##8yD)oKdnKa9ovjxZMY(`(i&pfYT! z6Z!ad4no+p!a1THdME1$UDyaE!{RZnQ#XV)_dAS#l7UbI5;;JE?civUtjca}J1z7k z4tGy$;(3fQYr+(Q*F+Mmk-`t<4fod(!i*OV zGfFs9EiOg?G;keIH7NA6Td<=P#!ijNVJ8|SC=qR}k181O$ z)QCY7XvA%rJ~NI~_&S2U84Q~1U2wPpX9IqEgyy74zuWJWSjtaInVx!Yy|3O6qv-8; zdQv`4cM5*y+uDO`0z79p>poh=v+!*#T8Z|w1lJ%J|V?ft&lGw>0s zUcVnM4to4PZ7}S2z-3A|Qqq1`XfT`hdq#}<0MdJL9R!TPg(3{%hx{QJqZ@FUGE{S;6YM!mtddMF2T z-j{rym}R#=Pn(y&%Z z@ITg)bjBmPhwG(=MqfXN_BP+7{9LQAS}ejFYT4+z(v?V2wEG7NOC%<=-@U2K6uW6w z5YtCrVQeg{hV1Cb)`x6!h#bSM6r37d!pTnINkq&MoMAL(n1bpE1WiJvLv7)VtsrluvTUQYk5YCVBEgdM2ch7l2WpCn|>&;mk;IxdxgRB zBx4Mg4^A@;!SWR;k@2Eke(7OFa-i4(h;Fn51^0FbJ))#+kg#oOilm} zSp#;$+zo5xJ>Y!ywS@=FJ=RX1D8ku5o@Tr^{MUv1w0StfPa`z`byuqgJfcbM zN?@5<)UHX@gyMNPSJ$L!HmKTBIOvv?YxdIW zM1kp}Le&uZpmI&B76FALRpZA36L7?YBRTLnf`5L3l!sqcnFU1yoq7Y>1D#UOOKyCm zStiWCANim(>JL$?Q`eO;i0ih(aziKwY72_ggOYI6B<{2_U3f}cSs(W-50^Zk#-XDl9B*>Ez|r7vk?=YZ%`Mai zfK%b-4U}!rAEX~}P`E)H$HLi8VE7$y!#mibSU3jKxPuS_?B1YI-57-i?b4Jj-Ylsp zTch53%GT3XPgAy+e4bFK_UEA|polN|Jbh59?RgE#)QZDcZ-8v>z zIf^*Y<0vRY2C49o`@A% z8d1Bkrn*2S@ogh5q-rj~i8hZSjohn`DK#i*tnDDvu+Bpppv{f>YSLh=qMOj*9-+ZK z=qYJ%FArSF6-Tb*ibGd&g<~kW4$3v{3<(WRqNk`SGb^F^Ebz9A} zQQ{+;kz#Rcp($!hLAU^98j4jdBB;KKSCOMa*U4C{qv25`T)@xemQBN6$pdfUwCJ`e zPeHS!Yz-PMTh1E<8*&p{fwV(zV%>w9!%5==dC&lzm5R52V3T!>3|@jd%5RmC+1GkB zI$K67Hzn@ye|s~QKrM0%`!vztgx;$P8*Y*N^+aA(nJc`4C_^_R^IL8t^G99=c|X!p zPX^{p*1A%|ai*G3@ns2Dz$Y98zkuKJB(9Tfw7te|U1G2~m46ahFA)6ZG4`6ukHKev zrtv9I{Dk~6p!M7F!zm>B!GEgU@5K3>p$+stW$!b)>=F1cAl;{Idjsv~Q=7D(9ivL@ zBz`V8fT7?3E_2Kf|MPQ0087Ctzz|yQDZ%MH57y!!qzx%k6eFPWIABS-<;O;FxYRMB z081|aDo*T2)=(eEd6PWMOBEN}#@VM&Lf(a335R+S@Z}<_YB?3sQ^yE)6nVJTv%EOu zU7UX+ZY9JvQ*7d0rl2HbSYX>;;$q-x;Nf9ojuLP`U0{MXfqqd&qr?`R_|{N$lu&PE z@FoUtW+42dFshe%dxgO=gB1jD^D&>brmv_LK(d?4Wk7FhYKg&m#KP~%g?@rgagCEN zHJ~vWg*trz=hk^PY_DkD_E_o})Ys>shzu{vai|&MJ^uCLm&8wC?8C@!hcPHXG3J0L zOJz9v{AxCICEz!~HN+XhiKh6P!yOcCa6oDS)}rn?lmcAH9Xv~*UJ{TI(WUscJvwd- zQ`lCRf^wK;wuE^`Iv`d*E5$T{I=~{1sDG9^7{CkEe-cYG21rA01so>0DJa`SqQr-> zq&;b;t^KgSVf*2`u%CX1V{SuB;Iw#6qt9G(skch6#LeqrLOmcYg)6*cfG>u4hquE= z3;^?uH^UuO(Xs-d>5&rn&{kUs+%-#ZHN^qr;EMTqY@c_Kg~4^l>x63b%TMFjI+Vxb 
zP##h*s7W1l#UX(I*|BfnM*e{AUx&$@`@oOz@6*TVVM*Kofm*!{89ES+Vp^|*KSDiC zU_?@T3k$suX>jL*Q+{Hg_I*stA_%(Zocd9C>3Kk$AqCwJJa+yRl+7m}o4RoE#MDK6 zsd^{ddpjGzx7whPloQ0+q?`&qgAeNJoxEk1l65HCP25$=%6Gh$Dv-$Vncpk;&BJFQ zoEbs<^gJBfjg$+s_@1GnkERXlLQdU2&J{6%h7Ft$g~Vm=phpMDW;{n4!K#o?(U}># zV~0nFKAtYc%8|O)DQkVBF={i!zWn3rV~$=WpbD}|z+nSA5IS%u3tb{$0{EG5BG474 zCWWHY=L$d6Ci!&JjFppedX6?*YNqu9+wqG(tkD6*F*Y1Ja8w&QyAG!hdFxa9+e%X{ z4P1=Ubc0EyGh_=Nju{wIH3BH)y6J?2Tz+~TpKJ$Y!)2Q>bOFB({8Bxj zBH>tzt7Z2i;TSjWdK+W503|*g=bGxPeheWoMF-YTFIEunkh29!_(3Re( zXF7jW=YxmG$|c=`#Pk8Q-R1W=a1c!DgYsmI-SEa;5e~6ERA_+ z9^99a7D=NY?j?HgM*ZlarQW!)1p3lA?2ooJMck^}#5O~{zcSX$58Di%D7no%U_A6G zJty=MTF*wC*czc5^Duo7@7C)Nz!wj61&|W;bpX0MeJ|Hpz#_hNnu2R#$JWwv{N$-Tt z)xrdrqu^IL>}@%u2H+RMoD`%k?5RoPDpW}Wg)eSI->2uXAZy% z0F!pnnLe@UrSp)TOicQq<}I7!oW>XvOfTT)K8FBKj`1xXI+1FDE8+x20an@hlU-Ms zT(ki!;!SXsGT5-^_qjQ0=p*O>VlvKa)(E~zDBqZ29%R!n51+wk<-z4THu5RYVobnQ zV}1ej;M*|*TSM>#ScF?2Tz>+?E%1$M`=}6x;YblMtUiV+a@1lx-3HCk>4UlDgAnl! z=2rA&c~al;GO6<>bsp|?;>u(;CB_mt!hSXhq&F$i4bUDWril-W)%VNe zs0qg4>}0@R9ht$`X_mZ}&Z|+OMX_XMvZi5t=~$Tj{4t?c;oT>WiGD1+`_M75{1Ga( zPAk=%Yl@VjmFl5Zid$Ipanjhhx_%tl*MA-X(12{MapMZ{K_scqG1#!CsS5imzM$SH zTOHl_QmsRkdZIqH{jGoe+E0BDqD#w--P~RB_qAgU>Am@AExNRJnnX_}3MWpmKHamA zUF?Pd1V00=kN*5JGk=M}$B_v~sB~K7io~9cE#}>Y+4Lz6RNj5-1e^ysMA1rAvq$Vcz&s1ls2Pf}s_7LJw%YNqx=^>E=5E;K< z$PFj-irZ1>Ej}0wT`F?06AA~7w9~l?h8oCrRJNOAK!N}_D$q-DsuQq1edtO3PSL4s zl;1q}R^$8k?%lt4VsFD!J4}}_#t?`xSlU0K%i9`*pX0nHD6fkDU7oK3gT$8(68t)_ zxL@7Dit7P3=;fo}Un1V0)mTYv?>RS4tH7`fYt=leUydXg8=jUX?^(& zFK6^YyB}sYH9vaka){Kw<%lHdZP%+-;oOqTG&`^eFRsO!s`6Wc@EJK#L!Ne&8|T24 zi_LmR&;}klq=}ejnCmjdY5d^7I zcoUw_fNXJsspKjZ_lo7?&GNZY`2>}CK~k?_#@89p3Q9JA6}7;kw$on3go7maBM3r6 zxjJukduQKItsWlhWvwi&q_r%|iegEYEL)DfRwUUngPmb^@9ggE&d#jP zy({f5cM=mhi9=DMK%o#Q%%&8QQ20>@X`mFGkkSW98v>-^(n27Vph9V&&{CH||NrmY zdF*;+I}d)Lzj}7>ch5cd+;h%7kMDK9^W6vA+fxR9ccy>mTfYCd4C9NusQ(fOoW$WC zwG6{C9J6F(_1Vmta&2WTIonxV&XH_H&e3cXXR8#Ok7wg1pRr4c`D8XJ;YcYp-+q*za_g>!fmB(^LJ$LkZ`(`nZGl8=lu5UcDZgZ?U>)0-HC8V zc9+xXbUEElkJIb)IsMb|>~3ek8FYr6P0p~h+1cW3ola!$TH0Oco840zSvAfW&kkkx z@~W_Rli_Tevu>Jd+j--rd1c#W!@0xBJa1(0cJ6exL{*`IQQY|zQO>WydT#`o%?Zpzg!=4jyZ2YZQkJIoHsfTpbp2K z6V6G*ywN%3Jc#oH&S~ci&L^C2an3o9BE?B3?>yn0N9dF@;aqesA@rd0r1KO~opvUj zr=4#?%o)dVzS(&Op@$svqA^mq#ql&gVg@l)SSaK@gi@DgR3Y!2uT)C7ik#1TGrX^3 zN30-vZazO<2oeu16{jxcr(N8PpUHc9x4@?(-W6va=Y&%zEKF7wmKisncjs_Ux%sPw zT#=6>pXAc=((>5S^0gTTugxI1#ME27ikh6sIYrk~#fe3);N(^cs*;;3D%V>%KI3@{ z?y&<0ri74=W&Xfo$y52#vR9mR4=h(om8_Ch-_I&$ z?TPt?tUbFhosG;c6sEKGLK(m-PGutiQ0X?*~@#yN_o;`OXA0~jpH;9w+Fz(bGMwT2;%7c(Gc1)$rSf z4YZ*(7g2BVt+M6VetH@?R6CGEWX@L4d5PKNY-+ZxmiF6MO{6fYonE`&j&eKvPABS_ zvmH+CMy%T9clqs87TP&JK9UIH`Ne6DyTG1VUI-$pFzE&9$^7I@hzwj{hGKcTRLD)1 z+#tqz?-T-iA@3k@zC5SfLb~cMy!It@Q)(jb0ZB+_wUrm=bJr?ruAtl??&jwgN(B|9 z7K-KEd||$#mV?w}rBup$h1^tmq*HYwkD&LlO5PDn=AJ4$kL4!{C0Fg^9lOYy&KJvp zT`H7S7vqwX3yZnQ$|CMW1w92(nL8LLm^?vS3$hA&&NMOC#jr9x=gL#XG6oT`5KAs7 zbr>0sbSw5M#@GC`yP^&vtTOY1kI}g{1ZN4P z<_|NpZh-W>Y=GSV6q0>qBjiy$F^Qw`v%`?fiWdwrtYB8nto<%y3V7YIpHF~UjW|&- zFHtAv#Bq+j3v@CY2Tn;yE3BN)a5Cv}o$+S!UPcub-GZA5XF@8K!S6}N04EeO?lMUA zQYPCe zAWP@JCJmVF3I{_@B@<2i$KoPf4nhGL$H4U^mca3Mz;babYH1E2(Tyv(Y$C1lbz7NTs zVRnG@tZaTcRvN9>tZB0>)j?y-TEfYI4YBp$Wi@3SjhN~p!|O&;JeLDH z&E>uo_k^!7%$4B{D4U^QoR{>-^zl>k{KY;w`(Km3v?;^vW&rneo5tW^A%?i zL@XDibGa*v`BHcXZ|+nk(Hraob(w(W1nprzO%_U}dS9{8(14s{itLjN^#uf6@+XN% z#7ajKw#2eQ8b7wuNgQqlpmj#cY%*GB6REN~k7O$kXTmX7pFWfT+{{!(6|NSPo^2_GEM6Njbq$1@KZD=7ppWsF>-KTJJ7bn9!Jvv!b33w=ADs}GRD zwR7S2qfT7gh_HePr_U>lQ|X!;tb!f_mbtP)C8YbuM`G%m zky1TF@GQYw2&9|Y!!4sr-cvf1Eq@({OvFSqVxc=3x28L7M4V)zBLI!=>~L%+0@=-W 
zfE0leBTn2&2#QQPDWXWH%}L`NciPd(ok6Ned2T(2ez}!FZjbq2)Qio={0N8C4NU)Pw0u_5f!&0hlI8tcOkxs9 z6&+Y83BBDIis2PJ#$Oyo!5mehkcin;*Y! zT1GWdO@iGta%mqpIM)t_H$F(L+W|&5DWOg$ISsbA4>}Hh^k=kNPVJ z7QKGlY1?G10;88>4)f>(8tHV}<{~#uH{f7^|zM zV{S5P<|+sbnmgi~@3XE%P^1-@ql%0#Fn-isC>6nkmMYgkN!q|R<}h97F>Qg=C|?BT zQp>R8hbySN4R{;WZL5fiGxX8xz%Vn){ z5oKOo-?&6p--W{^muQ%2{821To5KuQX>j}<;QJw_nkoDZ;G8hKaNT9Dw8&765!4th zYxoA@8lVx-0-uw}8kfGK&On5!jaddk#XrfqX;d70S&4#%8zpjXH4xfcP(G$J59$lL zJnQW!e-=n%)_@*?VX|)st4~VJqPP+{_!-E=Rol1R_xRvAXMslU15snmaX+yNio)k` zFyC3`Ti%JQmYi!zDr+QNYmUq<73P-m+8|#`?nF;uE^!Y>+Bc`70|t5D8x|1L*|l}> z+G`!m8}+jtV=n1_+3R#-zBwpp6nVy(r{rf6ZPoMfx`-}kyK(e5i5s?)L>mCTJHR{d zpqe;@PU;=@6f`QeUXYTcW3AdC1%0)CO$b2EBa%;Sig+nK9zxl_ zGkfGTf*Exz6M&-$>_XXD=@NlEGY-^vR%Yl*m(0TOAtt6`ki*}d$%9wsaF(GW*y9#4 z*T{f+j?br;3QSuNT~L(u z<7K&YG1;z;5xMXQRb|3tso){^iON#T^h!e8SaugGZUGZ&A?VJ#la;bpoL;Ofx_PDY z%Ola|v}DO%?rOdS&SQT1pxVKTtwnIcM1nWX-gwpRtKu<+4)G%UN+9J!?HL7r8q}qt9 z%Q1SYCe*v}p!yDgApI1v?CJc1x2Ouz%IXN?ZK%tFzSF19=gwbv;(YGxg;S3|lso;z zljE1v-^2Zo?G}y4x+c)wf;6Fc62sfBvDm*D-lb{_gKH5l58f>YLpvAz}COxci+rT%y2) zncA9&Bz7ktm*A4wH-0ukCvgZOZm@jmS7-U~&h47jyIh!<%+CkrLE-i$nInHZH@ctt z1Yw}So0Hd21CXA`rcu0=fl!MQ!yp_(ivoXnk=cY}t^yl{g6%D4IFghZ5hRN))Nch< zoD3ob8RiQ`Ok9o=_1y$A32lhCL2qMZJox01Je~?Lqf;Aga+K-L> z`#4;Ro(85s!c0VEcnI~;ulyBLAK|7<^&GOi%`hSV{--$B(Vk{`YiSP$Adl00qyytJ z#M`E(9h_=tL({VzFwUaC2K}i;NlkDsCg-?NsDxwV{xfL%NG-W)fEt<5(VDdsHQ%eC zP37Z`UA32vfC3h*xJj)FpgxrZ(q zv_)t|`?wH^z`P>p=&0-Z z^gK>N5Y#&u(I@?=QB(X{zUaNFunYyNax`js0cnD6rvSFDvV5vkx?C(fm20kgCv%Z; z_b#T0OqGgIPEV`+M34Z^hNd4vFsXq0c7pd1$OwHeLpKTD2LK^ZMldJ2A}1A?<*cxM zWEeNX_ljuaFK~oB7x*h6^Mo0(f!u-Ksa*x80171PkJ&wdJ$6qEqHm1C5YZD4^2aj| z8HJqkvd9;5TY_?LGunw1hw$VYq}V8>ffT)DZ1|N5z-`3s7d_DPdH1C@3;+g3y(Mogf^0__NgOQH zeJ7g1F<&6VX8}UFB$6;DuR4oG6R0PzTkqXcwFNO7zRk(Y!t91fl z{|J~lNW^6<_F$UD{dlc=)o@?*dYq&m(^J}mY+LK|+GqPk0#7uq2jm(HL~?f!B4=vA zsHSj@w?G}*UNB~pe)PEE4I#7%6F5F?QLAvnuBK7Dc1qcPN=w9GaMQJ6BAzP?mkr#7 zWDUfzxwb{rxE;_{B$yvt#_0s=ig}x7xB4-z;NP%jw>h0ykB`lg0@z=&*rf_>SVl1rm|3=WO~a%{VzeN zk9pAC0^?l5br5s%ShOmyY|XfflamG4B`>`MmKqA~U87@DnX~6Fj*PBp5E7nV9HRVd zZ4k67RO>+CP%O+^b#(%8fM8H!3eevMVuNylOm}PZtU(t-k134b-)x||92dz$)v+>p z`P7B+bK?&m%bdY#dO=~$FQaE&MzV%#1PiK@5XDC!04Ows2mZg50mC4od3h3QdPwV~ z<&0aLFP8GEw7g%Qbi#bx(UFX1?gbThp*EVI6zMy{mQ{kzQ=%v-70b{QaB)T0Z!Qm3 z78P1GNEU=`!Qz)j?@fl~2bkwSrEvQ#j-(Mua(sYI?wo7*ii zSCBFw?hix!?}LcXzaE5o5TCHOAT|;5~@s z7r1PIn2EsU_28}K>(x#JGjeuoM)Y^P#fUCgo zP-{TQ2wB0TW(CzMs@Z5UwiaJN(Aaha1dR=Ayw2+5K=IIA0F^ZmnI(t}3t&*KX-w3D z&h+fjT2aCE^+_OY-9Tnfv6)%?%dx%Yt~SE4KgPQM6M)-D&YV`F+7mwb_LB35k!}mi;{*afd}ksJ zUa?m(eiBFH*M`tZ94=+s2CvvlNk$e+`ea$e#Bxcy(;>?xU_iKBQi#)#@OiK&*@Q!l zR=>IzS?IMAPWrRsq5}95g7T9dZ`e8hTVrXK~g{5YWP$5qA= zh7DJ^J8M?P03&Gw*N~Cgpkt)%jXqB^mzbXdjB6&d+P?hxX`|Y)^kFiSUh=x77JRb| zK4+TD<9EUeC*^lShVS${DG90%`yF0-HVxYxSmkU6leL*Fn6Q$-y&YGsB3%d4{S~A< zESG?xJxZIMS{GPZFq73TJ~`WiWtZOBzCq*NhTkEmA?36IHFOa*FrOx~d*!Mumvn0O z^U77Qg9B*oHZ0?GF!gm4Z!?H@9t4wj&>M0(xaz_-(By+|zs@q zp^NDDHq{qiSPpEBy5HLUblrxloq0*F!(O|GwwfJwx;CLC*4pVFG^#zQJ6KV_XVpaR zpYpcWwlQ4o^?RM3O@?;|pJ6W4ozB@v^SKwyT1KZSBgcB!@2%bG;7PEy+Y#QeYP^SZ zs;}DbCH+3B!%lD4>~6mw@GhtK2K#Rh?)BDSeyZObHd4*2VQ;VBkKP*acGd32vCjuv z?%SKO0#w_NJVsH=L4VNc1AE+$HyQHLa=8Qk0N_E?IC9fg&*QwOy2;-ZqU8Q5TEo?^ zx_GtOs1EzXwL|F7&F?Z{S1};Hx0(IH-jKM#HF3idH?$^hr@zVHyon=qk3Y;%b=c{L z;Vl=}hWdxVXrpW(qeuJ zqLrCD;%s`xu4bxtR<}FD&qS*`synN@s=KRqdH1ou(dv8rw6l3GhO$299rgDx=BBmu zm;RnJ#!c(lKS9rKsg5A+PJavDVXwcJdiI+!^-h29?EM^Ljc`Bu2XA^a;>IwdxB4S` z^y7)HxoGX!s)0Kh+}YN0M-$L~l2P^%-r5XPK8log)Kg*R83;;JYQ1!{UPLMX zE~g(e@Gd`zs~4Ou)>23_dj7cK@7483Xs4vx>1S?6RiD4xxf9RpP#0^i{s 
zlcWBA9@7xYtB3qUSk{(!j@J4WTCfrC?8TDu-4M;g7zoLy{X?}gFhk$B3awu4Az;t? z{5^!xdKM|Wun5h5?(#=h?fNyI)OY)x{o&pHa#yZ#_eAYsSXAyqdW} zz?~Sa7W(MVoP*MS=ll`qDFyEX16ydX{KU<; z`he3_9TQatV(Vv%GxISVk9*_J7+UHH=N{+q^T4Ox`4>PNfIkLkiwY+59`7i6{vIqq z3^@1Ru&R5O9^=e>_KeKSgZ{y1Y>c%x5o_T5Sa^P%XP-0`cOdG%8Dn(JIpU8w_iZ-9 zw~b%Ph7!cl@w39PHJDE@8A8N``8(*LNO}5vgm%m zK0q{?$J0TqB<7@z{MRtI`;bDI+nsvh{{_4V7XR&c;`rbU;H%vKx1#16nLE)Hdpj9EeS;uzYSJ8J8PUq`6*ZS^~@m7uP-yl7|B8bfR_R(5IS zehG?Ln0RMsXq=f+m3dhihUvw{LIFXMr&i*llQasyCx~#b0j&cZZyCCwyI7sA0Bbsw zLq=vSYa>n2QKguOc~ln>^#}~dr0h3h<*0#Stvsg6RxkX4IwjWbjIGIMrWn}fy})bd9MtN=@;kASgXg1 zO>em{jkWiy1$|FP-dnDe_m^pFQ^M8e%$krm!^R4u@Mjp^MJh$$~6Dxxo+@Y>NF!O<#S{d5q?sCO0 zwm4NRC|phF`0q}C`cwS<`3dy_WTSqWfI|Js4r*+%!VNPntoToQ9KX#B@GeUQcjf%W z$$YuYTN>%3QK+!h7D)TBdX9$;lliaw=5xN5r$loXn}pSN@6&2)9HL!EX`0sSUV9xv8DA>qk8)Y`;b4LNr?n)BoLsX6 zG~AqJZloIz630vV`3Wcgz)Q&Lzas-SirIo?cnCWaa{nlu*(L+-Y zKR6=2U-eKX zR1wV%#0{l#5TP}j7j&1(ipzK=2<4 zssKS^rcheQEvl0GPmJp(pe9-j_)#jFRBdL$aJ!4{7;J5+EClun)*EA3GGBy>nlm`m z1B(es5TC-s-jT!D4nw6p_Ib?}mP1u`6!$7Bh~>q)8E03}BsrHLDn{p`9){H#jCWzL zrk-Z*_wlwppI-_hlZ9d_NQdU>lD+yElW-ACG|#X@E0xrXe4;(v0Wei5p=pD}e15Kg z_H%>iHAO9?D9&#%WrX(5K}?JuUDd%T@F-IK00s|N3$THi#a=J*4~5l3ve zqvLbD(NFMJfWQR8C}78sY)_IU+y{A2I_NJLqP{q=CTLAJAU2LL1$rUvb5-M_Y|!DF_?C+nx7!}IO}T8%5)CQiNKnkkUqhVNxe}Ih+c*{ zk%Bfje;lDHOmO$h2pF(G(tjOT34rmRm55ri+Rx+tg9gsb4?d3$`HvDv{tSrd>Dm5CWy>M5Q^&1 zNyA<6k`7ja-)?$(Qy$DN)JHsQ0mMqI_1-2zC5thi!Tlbmt(gi|)(I$NEI)D_qWzUUP}=p1(yj+eyA+gmeO@1}rLi&$fsvwO zsIp7FHAR)(l|4`6fEuS?7I&b^r_v4U3GEPe2Wo>{9iV}c&n<%;<13vFq8buKOJR4q zq5d-(Sa_COf~m+7uJlV1tQW2nGRF@c%q$d_xRvi^{6E=6A$@%a2)mfp9P`?hV)JF< z)J80C2ar&G(cth#Lq?;IkPbID%{Gjx!R-7N>VfHp_cGLP1FYQJApGa1!99^WPQu!E zae=1G`YrQi=K#0`$v5PnRlt0PH17UWW+voG=+RMLQdAJq24Vv{t*K?%ZmKO?gt|4U z)=efQWp7f!kPzK77DTxV+tp<7+nI)hEhKc@*gewLf{SN)FT*|~GJ?H2C1SN55rG8* zOIh6%oZH1T!hs8rC651wM_}t8?lQx|+cr#3q8%awq8%cgeK_>;y>xTwA z&n8Fq&E>iJmJy6Tm=9`W0cplHB0{r*m+|nX`VhcU$mWfT4);4Xumr~)hpk|VZ5WQ2 z3*avRVR{EuBTaz0%7*37%3d#0i$DzpyaCl4mrHH+ldwL4Y;)bxI0FjtSR)lQ26I?+ zVG6{>9NVKIa02r=G6a!Uo{3izUUD`ft8`q?!+s3zj=(MmoQDkVlc=>JMH(0=;w5YC zu>MF;$APrbn`Zg_UPo9yjDk^zRJ9E-U2R86o$9v%`7YgjmrVv1Ctzw!l^jaJreIw4 zIq@42C}$(pPAoR0{I)5}Pm3bA-EZ5BP24_|m)Oc311=QvG-;J?zk9YHSSfPTQjhxG zwE?KJqgb>6KJ?pB`|eQj84(3|{R;I4mP~Fo_+EY&Q$ulAH(*0rsEaxstXcZ$mOPSc z@&GPdlLu0_ zrUkYO><~!1o30yiJ&wE~q_&)Hxr2ofgnMzn??wdlC+=f^W3|Wcfx3K)2YaA;Ydif~ z1qm#^E)KP+$6!~|GD=7Imjq#$w5YnUOIAyrns8jD{b*P z#N9zNM=T~m&53nz*mto-@NO?Rs^1ivNoHVD4~?N1)hBCr>N-eHp!$90ih)`e+Rps|Zs ztaArE>>!vHr*X7$Y*ihu|>5y#z-9g6NEUH8-PA;QV=YmAjE< zrXhABGkv0M`pc9KBgtUNi=A#;ME)Z6!*g zp=1|lB&_tsh^IwNicDa`tomQfD!PdKM}sz4;ZvZdT0#9D%iBxv*90#SEE0&dlL(b} z@bXsxf*xIPuB9=8w$Num4)3bknN(=j8`&f@3ej7%_pxzO)Mw>jPJ|Wz_cG=C2uOut znktO-zhT4(0bf+}=M8~EER)89xMmi{gmVe3nS+6K?O>CO*J2sOC~O1LJGx0?Ar3qS z$z9aVIY|ENC@D7|lu+I%jzodk;y(Vip)27JJbSg&?F|drK^V>pftcx8ok!bFlNUQ|Q-dVOQ7!73 zgg9P6zynglI%|a1&s7kOrJdNE2=*CF^AHdhj3t5Mux(E!A5`(jjVr@oFu64ws{2(7 zA)+;euwx}66RN)JD5``_5{dU1ic&Ph;7%(KX00+($g`+X_OkFsBKBn&M+RnH8_RC| zCAMt>DGUfgG?-f1w$YonxS?oMpJIle1~@woW!zb@G*JJJkupzAwFia4%>J7h^$fwY z1eDJdIV5d4ah@YE4K)Y6-^>@F{&A#2EtPDtE%)N70@Ayb%@d|!rru_3P7NiJiKoQy zVG_}e9|f9|IKCcM73YwTSXG4Y`?^)d>sA%7TUESnRq?u2#p_lTuUl2TZdEY|bQ`}knf?#)pJn{75j2^pIb6P%Y#*xvgNjEtVEFF; znk*`0*$En`->4V7rY52(`y6h42P@IApE&Z>RpW2hfI|HTz5-hpuRuLm-5y)k9@e($ z_qVj^q1X5_-vGM_^_zT~uhDYi2V36t>tZ>fmBCuK{56|RXc6{R%?du=e2F#F^R?S& z$l_^zWKhmpqjr^n&e`^fue~4&-LGEIFEtDLN(?2m7XR`fbGjJ_0KTUNZ)(TS&+v*H>X5A!;?P@)9M} zAF^)y32qVm7XnU6^+yC(2+9P1Oz;tccM<#yfuLns8~Sx#dISpuvb0oX=)V%kB9o{{ zdUz?yls{oemWD)0@nv5ADZ!r+h}P>SL$b{D=M4P?!T%tT^{W5L&|eZzyQg>EiB{%A 
[GIT binary patch data omitted: base85-encoded blobs for the new utils/__pycache__/*.pyc files
(compiled CPython 3.6-3.9 bytecode caches of datasets, general, google_utils and layers);
the encoded bytecode is not human-readable.]
zB_L77GoD>6G(6{gx%9$VS~{$Vt!JM0%%W$$;F)JU>tY4xnRcn5xQyVeCy|f>pg|95 z;EUZ@QuX11+QOut327!$=Jom{EtV>kfUKk~h`yVJgv8_wPa$#I%sIlVn0T?5M_FY3 z0jghOK_0xaAz?=a89h27L}DJ_YX{e9dk0trI3ZRCcu?MdzNUN&e8zEYVU;sEuJ)i& z%@0eBGJhMAF)N~dCpzDXGQrqxr7?zfJCWBN#Uv#g_eb$Y>F6ciTB@m;P_X}>Qj&To zpk%M~D3I5U(DE+Sa(hq{G$QX~l!@5@kugZqE@KszAg}>Y_j85D8P6$}$`zPlOR z_pz>vDrSLwNCIQyCdJ%9kG`0JMR43i%-p2BrJCf3%Z=$wIhI?5#5l&luju0ls zC{mTjM!bUF6Qx39L6tPBof$>GcVbk-X_}WH%V!<14f7tb&6*7QD7_#>uasw{cEB#Qp@PFljCh$MB6<8uXpl(6ON?gPDff+kl@K`A2}+ z#|@w^_G6i4Ai_J09oo$ay`(@ssN-ee&!Ff#a!V#&F?Sdrvd%fDTKQ#p*7!lsEW^7= z)fO7_3yr$wCOP=y?BPne-YC}Q=8rtW3R!#7SR9M@$QPFJ!3+Q3BAY@%yT#nSiO+rGFL{w7nz7?J0};cm~RfzO}fTD&&78f#M`ivTa{M#Rt=&zh zSbNBRLN=NG_iQ%%sbFr7{ioR17S$AI|2>`A&tQ_UpCWcW_WR-hpV=1&+ZPANcN!=T z;`%}%j51HJSBV^RAl{HbybT3A3&cHL3Is{}R@NR01R-4u#Fp4F%U3pAB8~#do0Eul zu(K@^G17)E{3!R*dnKw?P z2iZ8~Maql`Ym<(wAfooXh;uG@$++1sJUhm2~j^~qAQne^*QHJxVatffAPmXh1i%DIOH z;;649Wa5wXW(p%2Sg=Gp^ss?9@NQVv(PedA&!RlWKN!bwg`RYZJCRnTujW9iUY?n& zm0jV1I!-u;v6oPV>P$&2OKU(&4`}iW0pANU%007yd?A{}=%^DN&l=7;XL5#>G>6Tc zoi&HdaWiYD&8(R=@3MAVi??)UT{wk0ag)vo#c`VL>mk^eR)iFBh6kq&JmNzzBhv=1 z)^MJia?`PSZr013EMLIn>HH!0CHqxM^{cGsRoZzyLcAP?N1>9Rr>xhDlsO)B&6(#b z^&cXM56yL&5aT($2vG;F13IRVMf4jK;t z;wDkTI&7e9Znn3XZDt@{Jy&HUtn{rKD_a`2RwoPZP6=St^`UNdy z*B9n|YvY+S&=xc~Exs>p%~hO%t=SjyEz}F^&$({Fi)h0kF)=QRwW=G# znv4~!??pziKCpzVJu_Xj5MLIuCdctww7UCucmxEYU$cjUQfB2FP ztEEca%V9e6@n*K?a;;`0pr%~j*bfsLa z&lid%FEL%IA@oM$1HCZs*`4HeFy;`zF|oll-X z;#-ull$29>!sRuQoacEPQ9;}UcJeY#t;EbeD-E?ZY>rs{P-H{q0PaSuk>ujQTG@&@ zsnifM?sp@@NkA+#VMU<{XzQ44g(HHBI|xQJ5stWIt_?Mh9Lq@t%Zz7J=h=(j6E zia5iAq(6!?7AGsolGlr%Clwu_AmE&PhcUoW2j zI-6iyhLtrJcXhxj@KrROK`JR&A_|Cx|r33g`w3L!Vg^!;?v{~ z*86&iyPHRp8v*rt3*L^ZqlDK(A26?jUwX~Us$Xbx2E268@0ak3o-m;|=+Ev>{Rs+B zu8{~YS0^WmdW7qh))!D^OM4OZ&f&qQ*dFPDb+3dXq-(*zSih!R&=C;17)7R4E>=vg zOVX}WI`{*%5phl`g_53)7^x;Ny#z%DD%J(%O+v3LAis-;k$~)6i`^Repf(ESgVVrH z^3kxlYKrCd#45aAc$v6s+C!KSBIgWY$x_QqyTl|_gV>HTp?pUjQS2ASzVv2+k3>hE zN6xdR+hs8mMf#XI`nUSc~KYe>UG5AB8q$-XCx+s#KeTrv!g7+F%d3^ zz~O>x80=pdI$4^lsf&+HPnU~jC=3zoa7eNS8>a{cyR8=<0uiu8G+`g-=PMV%@F=P$ z;vC5|HNsbLiW-M<-QB6xfZ-ciiEEKGo*PpYWi+a8%V?Z7-^_CdDMRzNvH<4wayny zj-D}!Bav~lP!LJIH*YHC@5xK+9eGn3y$^3Hi+5!I9p34Y?K?T>)L}1oyabP7PER$3 zT;eNDo(N}@RE(s#O`OQo_>8j2ojlm}g4AguMv+f6k}HTg$h<7wNMeR)Qm5mGy)su> zBCDbF35E>O_TU>L83}pXgQvNbURaBs**+J$X5j50zp@1>=7hLQp^mN1lzPRl9d(oE zEp-fpbquuBF%Z?^rqKKJ;-&1Ue;iXY^#{W{0%hNoIAo*yPuFUd1EaNh&V34%u?I&V zpB{Z$EtC$7%H*MdP}wL6Iak1f|0s!Bt6=9>tvb42(>H`qmKycZ^M}U9!?xh~z8zH6 zvm#EKbRdP2E362f|5RAWL~e3yP*0QIL$FMfq?V`@`Q>G%5c+zm;2v_l9HU?e7symj zQMj;~0hoHp`Mg{mZmRM!$iw}qHu4g`@rG4t22-O_2{GF=fNu#qX_Q($VAg2iZEw4erN*$1_V41d_cg1PXG^s9)46ZHeF(c z_u!QI8dh!xqR{q`@eKC4owH=`qBZleU*_?}hbEc~H`37T}rSbR;qtD5vJfra3m{{cLeI8l*v@bkdvpCZ)IsmE|9lXN)p zNs)$<`T$|!n8YmOMJc2ENPjjJVy zvHB&TA$1Av4Rljit(5`R9@d(^ZmlSBgS85-aXb;RYk51s0Wm%FiqE1R@x7>>uw*vG zG}&6ZWB*Yis$sqUFTp;#5BL)lC#PEpy`It5RZi{6jQMKHh4UeuYrew~6R%0=&Yu2cHn2dN%{ z^~Gwzs=WmJDR*hP>-X<6a<|pL%gWtAvuAc2HdCgqn$0b+(7EugpR=)MZ+O2|?)$>~ z0p2eSazuX6R{y$ew2!1KSW^s2Uoj{_qLle9Lb@^B9Moe}D__CD!bl!mX3QA1u551( z!n3d>Khhk;$Q^19!IEcNmI(C{1kLBT-u;FkF7|b;(Ts!CKK)1};FYEhUnkh*nR%G0 zITVfbv^tF2QCQ+2)a4s`+Wtds;25@O=y1Eu*benRRz>TlMwvXy-#ysAKOmYq*d<=VcbE^>YiQ65IkRaBpiKYi-JNqVm8? zxDrIQ&elD1(lh6$oOAg~gQ+?#c}W!@GXkFxIP!oa-k?rvD7}4%b8iL%>eoF%FvtOe1Wd;1kO5*8RynGWl4{Uyhm2CA^M2;VVGh? 
zn`UCwx{)%g7PgdQi#f{jOu!Ja?gZUvC|sH)4EYr(NI1<1`QZw)1UH*OHax))@K;Ww zRX8|dqd*h8;=tA!1o9F4*2`_;D2j}vruEiRwHGDTN01;+Bw>M70vO7jO3df0`3w4T z(X(-{;e58U4Xw(8SNrkMOD6H?Mks?t1`&h@?-A*;V#e;k;_iAriO`uam$d=BT+Bzu|RAd@z(1I3SQx$wQ9h3c0hf z?Y`x62OA`$Q$$JU1dsO)$)T5u=?FE`?ea(Mi+PJ^ua}TlpCcfnMe7p;P_#4+B9iJC2rZ6=>$(#DQo0qg|XK@)12{m{W#Yrww4I$(`<6i8_N(3`jM3faP|>`z)w2wM_Y zj4S39>k7WOX_&LvS}QBRHrIZ!^2<1efhgwybAimHMi6C4u)Huq?4`ESp&N zgw9xYn65f}24c4DPa-YE0k{{LU^R7_ZYIXTGPYD`P7|pWI0jOwU!lL6YL0^qAOV26-Zs3){t!2>Qp<6#e}yOY5#ey&)rCQQj0WxYkzT_u%|ry$mjwBtRqFKh*tOWj{^tYS zv36!FLW(%UgDCUZ2#Zpu%J>|hSnABC0n`>}ldwiU%GiNt>I+Or)@#zd9FoFZ?Dre& zOm!60R40dIvy>UegRV*Kvn@?*37Y7R;_Ta*7K&Ta+pZ~|JB5!5N)vidNvP!mks}Xf z{W?x|dUtC+%Xt}P^^mv^xj-Hz-%<4EgU)cL-r}}aJR)ucLPQ2IKY>%ii{ePa6XIq% zE>`2)UwA+aCu?VqtOE{g-pgo%ib)i9MMr#l6Q0Swk51#I5yNgMhFy1zB%NwO?ee)L zAw`_w5%hW^Lnn9YOnJJo8I1fx;IR=#E(V?P8QFrz#u;hrkGPwdH>_c#`U7Bg<1Bn< z46mV9`gE!NNTG{C?ZQGrx@KWtD~gAXcQqQnxIreq7<5ONxIKs}S`%*#NA6tK)u$LHbjKj+j(g;Hjw;|o#FeL#m1@#M$3F)54 z2s1UX*=;ih>X%3{alk~&`W}GfJ8Iky1l`d*j$_i9$J9gCDUx4_IOBW)9eoM`&Zf zj;ukuJ5G~`zZA3}oF=4uP9LivMq9;F@n#I#Ki(jxKN)mLbGjXu&YY&ZB*T`|*jClN zWOH^7l?<1ODjAV7`HTa$ww2Gpl+@9xd6YQc#$;pq`O`7nN7mrp9lJ@%)u07oHz8fK zd!QZ7E{FD?kZv2$(w_^uBn;oR7Msp24+r-lwAae=xHZPY4K1=Pw)*UMu3BJ!qV)?x zvCDqQD$!z|N7i;66|At%N5La#=LTXMUq!KvF#9_} zXN1{0w&CN^nbYCm9Y#y7oMvpJEvjLPLcZ~N@b_ZK=uk$0Om}=ES-u_AE_@@TYrbXp zRRwlQH;%=oz>Hm!PW6Y8bYdE}&T|VRmk;%#p#*T#4QDok58I4cZ0_MVtNe>b?eGkt zrkS4^`khDrcJU(*$v0yS7vB*{F!5`6COr32MI9cKua49sgpV>gfy7IfePq>T-lnS9 zlZlV}g80`r0PGz4{VNhq;_--oC9x*BI1u(b5)u@WgIAstIOPKgbcdZl@Y7VU0O*H2 zJH@g*w#1Mgb_Q>a+USn=Uqf}@XT|h2!TY2lWb=4G@xRP_^>v^h@cx^~>+w$hb>h3w zTv)FDlrQ>|@AcXJ1JbH9yC*i6-T%Or+SxtPWyp&Z`Rf?F3E4b$pX|);<7njOhdTeQ z&>!q@Z)LEfJ{&)KC>r(rf3Q-a8sq<;q0GAc_nca_RNt@w{BV5%IJvn3@S{!waI#AQ zAW8UlkI{H1q-z1_3G~sSuq=sX*IfP2XI_W)Hw`T-(Pu{v9;A zF2_aFw=-P-i3Gm>ZPez}H}HCAHUIBx*gn+K<;`XMf3`4wu(g%$R6S|)Z!y*rvYD)( z_`hWRU)&_?)t~dl^*N8fgl^OLEu7zEou6V`zRqvqe79Pkq~ZE=H0TN0OwOO|%=r#_ ze*=U5#`M1W7WwyeB!SugE&(#9BUjta{FCNJob|EZBA*bfBvG>p64G5SMdFf{B5w}J^=Z%FkXOT+`B+|)&`uPsJU;0=}`PDuC#ijR~)K!l>gC*hw2lA;KZ@Cn}r62gDOx2OWDwjglMecjXD z(#*=EJB(BDoZ5W>6 znN?%Kv<%Z&ux4zN5Uc7uuAii4Qnzp!+&NWuAw82ZjW-#!{U+*}#zTge z^4w<)&n=tt*=zPp<}F6edb6>72<6k&+(O?>-$H&SFC{Zx7A3Pm$^Jwux#Tn53R>^; z@~D#!>I``O-oUfQ%nm%+;SJ)+VDMxRke%KTAVUFUCm_S#2p}T?WC)O5-Y6iW0c04E z-QE}=V*z9YkUidBK=uZZU4ZQK?f~SD05S^5o!)*x_6LyNfV{*T2V}gQnjf3r14`@# z>@M#BUuM8ju0r@iTRe-!I zfZPMfmwT@U5FoGd9suNl0CF!NU*R1G4Bi^HcJQ_e=4#-z} zj{)*n067ZCS#KJU=>YNyK;Gb;1LRx)Ip&$?jft;%j&o*u!t~AikXW@E(#+XPi{A1zfB(BB?lCm*ldd7_9nVtnPwLKdRI=;i&C6)|7%saTIa5aXIw9KZlX12_A zbIn>eMvU{uhB-Z9`{o%x)2Poj7pqGR-)R&C1p)Xb2Yo&%~n*Fn0O6eyeWZM0o7gNpoNo_Mga#sMTv zqiME`YZl3B8cmyxv7wuG%P2S=2tARSevZ}9!PBqso#nEslkktuIL{O>mwg9hD5I-X zV`;Ih)F?`lOqE)tStuxS-M6pIE^L^p5BUw#&lU=$YO&G4Q_@-qZSFvhWgFzWVGe-y zKz%Z7Cm|del%OUT;W&p(%bYi!bXKetd&OBvwE{tT(pqs>(oJj4Zn>Vh&uFDRYmc$! zv@pJw<2iecR*LDm!>3+qw=rjV?z)8$r>E!8EI%g|hyL9E;PV`XB7pzJgK9VKWY`yq z826pH^HY$n(goix6`Q_uxl(>+A|r!VxQ>w`5`3i9o!}&Hl{GGM;fMI|WVs(Ir<%8#2%E#byj%Sl$ z8_x8kI)F}vD;t*L6jApup`dgkGpQ0xbM-Rhr`2GxWF(s;+y1b4#(XjK@>6dt*VSA? zjQ?LLrjBy7uVB)RLZo4+2>S*7Pz+yyR>x53?yw?s9PhV`nsZ#_gmNr$0*TmXtV6$3 zvnxtJ6r;0HY+mr4Qn^yaoq5?$dzH(TMqO>NO}anN3>=V522o z4Qi^S`DnqE6fi$S9y)3t%BTmJP*eG)H(`lN^s`C7i6u>^WnP1nUUa}9tX!Znt!Zt-T+O4` zqNe7Nt9>Hhu9jOm$QI7WEeY`Nz0XaeJib>ZkyT%@S&jBpl! 
zX@)w`gpU{bHGtWt4InR?ewAe)vU`j@+BHddH=!(uk)!pRL-DufS4>nh_ZaW8E;$fQ z5j7@o6|KpmzFC1+lCCc`7nhn1{giU#=Z;q^jb^F7uz2G2xEHp%hGi^IB*zxW$K>&9 zy;Q6=P9PK3(-K1$V!~0U@Dr0mQf$MH90c_cp9^wlm?I8i+D{`B%H*z&U3ZsBvh@V( zi%gpNlp>@jGC7byK#Bzzrn9YpXEz~dsgJBeU-8FW!` zij`?OM15FPA09*ozKYJnb%gV1uhU5s#j+79dOCs|&L+nCu2*grOBWQ8iX<*#>NNYK zyc(P5%CS)Qcjqce^DrA|bCr-U0ax$uIafLE+v4hZj*}dsKMci^kaFvE} zJFW&?hHn~h*$&LP6PR;ySsh^V7?W;neKQ_*X6yaivz4@YBO7S5m5`pWbw^?@6skSv z>l=XSZSnQ39B7QMW1BGS&RABmS^Fs@w`VfA2zi_38F4c*k43~DtpE1P6aAd&EQ}1+ zg6{Md6T=0lm>8-nF)^ zrzD7Mh!uM2W`f<49TFLyNOdwsy7Ne~1%m;i!FB1=>^x9d$vIf4ZZFytk0;qeTeJz` zfD#`*<>&U?)m?8YSLQA>%ZP!Amvm-{e&v~xts9e+dA82q28r3pO`;YkIL(xHmUct9 z3U13%r<>{dOf$QdgRr@D^ga6~OZG{41R8=Id>mo0epSA-R8i$dVNMmjiM)2L&f#e^ zHGKCO!B(9F*f(p682Q_eDl#fGB~1-XZLEZXK%oIxv2 z(`5j{J=ZSJnw^CwGZA4w{-B7b5MlBzIvf{Sfprv%nX{9e(>K*9E z-HkpjofL?pK8DaiFo#=d%u(RL5}TpN4b(v0c3DT44ccNH6KoT8PUlQ}!Wkvh75b+e zmAQp_#S;!F+8~)7eo7T@egz@|~So*ItX9>Fym+-e%ba|BQw_!g=(S45XJV#@zr-7fx94J-`ixlWaiDDIgxPah2 zt2MrWBsmA0_G;>yxspMk&II!xYdUKw&uV2TP%UfSUUS!t8}@Y@5l=2zGtG1>LqWS{ z-oP|!S#y|~b7m_uVt~!rW@bK%x)hEK$8pnM$+dETBK2H+vp4Nm8ydFOx0-^0=Z1~f z0OGkQVI4P6Hb39r%C)i(qrRIW@M{C>#@deN;5zzj?pzzXZd^C7Ti2cIsq5~n>*bSp zA);XvhDA&CPtRmDVc--}=H_nx(}&jn^VdFjVvBhp+r+$Zfi31`P@9*F)|#;%U&TP> z*o~zHtj$iNsBg}}f>8E0*^Qe3MRwy*n}cmd(0%E07pkHc^CA+bc%{_fP&^}Gu=)HN;4Rm-%VpBT|v{%crE z!m%frJBY;3R4cW{VzE^AQ?u1N0#|P$)T%A|cKIqo=c-<<`l+H)#jC!Hvc>A0pQ<*R zjZ2_!eW~X8Zn4q4x>)w@O0B7fkvl8f4v%4p>6;4^eR=`8$c9}_q}V7pHa$`o#^pjC z4bGwA#fG|=%>(ZV14dW*2NCrW&&-@;=5nz?zY4M7n&G7)i9i%Bqi>faS7JY^Sc!xC0vOQ$$Ju4BHmP%jh<&gsTl`(**QW!<#FH0 z^@q?xtem;LuLEwO1De2*HQFA{5!}!lp+ml6 z35M3X8G16<6S7(`Gc+9|_##U5m8gJyu72X}6%YwV;JFdujCju5P{Qw{r&rLyBfZaK zHLk?iMNavdaLgOH(~BOA1Y^0Q(^$g7(_6NC{CtDFDCxPZcN%Y?$yk38<-Q3I-pBrE zax>Idj%6xBdm$T=g} zvD0D`3XQ8cDOuZm8cf^RpfkYcC*+hvi8HcgoBj*9NhZleI3Tu78&&| zY&D`!Bo1%KgAay{wZ(zZE#g2Gb&^G?SKFHg@UGwshI{rSXUhw9b@lbLvz1Z>DnbM~ zG5y$zjY|X|hl~!)N36C)7-9bw7pqreN;ATllmDZ*LZx{}r%pr0zQC^3JJH5R!gjDL z#rqdE>fucYkQKK(Xp&rg8xs;G#;dk0e**Au$oo3O8u(-lA6$ zj?Oc8W6IN1u4B@B_CCDUczu0B~dlMYUlW%PNnnXEoV zHj_ggIX#9teRAL=565BD&z~;C#xLj5NSxs(>^@Suk}N@_stJ@ zIO72?1^KldNU@Z|T^eodY^BvFg4WSCc^qqFsNKd;tc{_l4KED}WR|bx#)Ho(HP?8c zeMg|27!qe{{P5HDdiBV7eUS^5Vs+wy@keIIA6HA|Bje)W7ZJ`HCova_*wP*+LF-kV zS*q8@4{I8?;nU@2WBl^5iOF_fa7inms$R(~Fn53nB~^zyg69|8g}5S}o*33kneQZ+ zqOV(tN|9fFb_SuTb4Bl%=jR!v!b)G(U8=^X4wi+NeRr`SzYdQ9`B~)QxYU{jiLm&i zsyv6~OF8ld3m5A3i+;M$C^rjL<>w+o`R;-=6+~OO5Gu_$@V82vk&|)K#qJaz`wZZE z#ggJK0>624)Y^~r$^e2ej=2}PKx6=s z3PcSMso)d9!)QeiH{7zIVWaoslHd?_E{CG9@`&*S&Szb+cm{97n)}}G;pgWcoYwYZ z3YD-z&xtcysbM2i!>OJ9?D4sBt$bxso%jc~P3#?RW8Rw_t)_h}6cE;Wv1qDw^c2TW zyF8J3e;=OzV;m*uOs zqm3NPBPeRD^-)Ew;}BZQUCSeC)K6JJpS( zI%J8%Y9@Mu|LK`y#=0GrfO6v|1p=T!38+V1g2&jr^i6ARh^@C<&1~B$O59?rf@=~_ zMA>4sLmUu?!>ITy8WHP7{j^Kw+L*fW(jCW-5>X4?#=ipNki>CqxP|yVj7AKbF)aCX zxv0gcTm)k|guDAtHZhv0Y&60#tO<0vsD4x~g0URh%%f~Jj#E@N-lpJ*a#8)LTm%c+ z>_?lq?b{SQQ7)?AxlI}S0Hk^ZrWc|7b$bQIQ|>ZyHyGSy;NvzYl*znD1-&~w=E)}Xxrs`?QOR4~w6Zo9K zkp~?4=8Ovr_(XnDN8bm(s_AD!9DExdgeLrta%f^kxZ3<@T#a)`3@o-?a}bMe?4`j& z$k};&1U^H?dcu6dywAMPexv=kDPNoCbn)fy|5(<*7cG0>za2M&XlUL%gc^A>XKgY` zK~OQ_S}+hn&zQ!%@eU+hFyoT_H>&nN*-FM_K=2L|7u(7-9P8Gtlv%fM9vbJPQI=N< zripE*=v6}n(@;SPtU+bMheqTMzc5trx)~+I6HJO=qf`5mbQs~2QPOZo3@n~Q5FX)b zy+0=2r^risR-dC(hfq@e7;Cwj^*>9%hmg#q77Mk)75#J7w{fRg)Akes`ND(4oE_wr z0iW%ldGL!F=MEFwWt%)Vk>^6K6fS;PfuwDd_j_@J;R|`8DY>;thaco*4ptUq2V6)W zhYJ}~D=wa6dBKu; zMjDTqV_(eJ2h0QTmOE4VXhMkTQ&Esbh`FE2BeEDD#0m%QnTJ5hF9gtr8yY*bZ-a{T zmniaF`0?H$Mf4UgeWX@qvmjD2W4$9Pjj{pol921eW zwz#Pe0Ct>I$RT5xhwMCdH}dB4aMG<5DJwZF6MXXc<-%4*PnN(-#FiaiPJl%4b%c-_ 
z7)d~j6)a}gO!$W!a~2cPhWAK65zE+MgBv@Vgg~|ej}MfM);voXO8K z*~Ab=ch5v(ISA#Pvxe-0))8yGqgcWaB;0))4UjSXZ9Ii!H6@Hm-7s#LH>?|Yd!%X3 zM&h*_zdLi zaBhfmLp+4|=IAPYQOt-{9E#BVCSq&w9Hil((O`4boCG`Cv`79ln)J;Eb_#^NLPeo5 zFXJLaZOv6dN_3L& z_t#lckosxlo*+^gWDJX^A?~o2_rEY=2&8lJM;m6-XD0ePz993hEcX3OXya8|O~^!7 z6fQ*!)XSMfqW2Qs3Vw`;{2nfeh-0g5l)EXJ2QoSN4q{)0B}%vwA4(fPiLJLNV8CFy z=OEgacxCIMQ|A)CCCDOTE26s}H;g94U)jRkhkj+4jB$m;8QM>xxQ^tow!ia~S46<+ za{1KdxpRm=A%v4DUY;u~BCptVr_jk$p|J31(Q+Q>MUfE;b;1wx>}3jvGueue60XFDNbz$!Tu7g<;P`u~+?^9c zXfeqNp+0RN%+WslsNZ5DakOrcBd{1YwB|9w!$qlEwHpyG0_#NrLbR& z^{^xCVOz=)^9wP`S~8ZNP&R)ahvdss`d~*wM~4DLe%<=ZC7}cQ!0J8s=xU zJ;c%oC&SyOP@fWe3(9rkR5O9$NCLxc8AdA9!v^HlK0ge8&~&YZ7LATV-1j=;A3;2^Jt4G2F7=?Omu z;+Pye-GeFg8B$>TDfDbO9^uIDBpTg$(q0YFIf+jqyf(+}G`>&aU_#mOfN95sKX{{4 z`xv{4+g(UDZ2j)hAp(AfNjJu=MtC9=Jh}^dPvPpM|Ju_@_I$UWPVY{jFuDnaZMnjM-VB=%t`O1_t_&t| z>B*G(BOrGRe0fheBH_!P&4_g8%xzC9;nMFDt3P1UZSH(ugb?P=*cODgfE%z)6`~jrq|7>m`%Gy?tU;D z1ZL>o4uja1za-ZO!e)fOg!F{J{pZWoa;b;jqJL|w{ct!OVeM!Fi|$;t(YVacab)6L z6?a4j%4Bx7Z5)<%vdnmbXuaSp{DlNYyOJ1f%OVc@LtzWTB0_q?q8$lTdNTk1l+@UM z{(USQkT7X%6CT~!)Si4t&`+FA$;n3#5|Oh&p|%YkPU;HFsDbsS*bDOQ6A_Y_U%NLW zxh>~Ni;sqF2qdU!q0cXW$AdWNV>g-}Uj2|L)1RXk;i8`|>8OglD54%GT#U+BBj;x-0m6!VUw*oV zV|sW^3lR&6Pkf%GJH$(bu>qg_QvZy7!HrDnAHEN~9F`nSmIx%qkK zC{LR>swqiks=s3L*GxKa@N=X~XAVx?J_ps`up5bIcHrONbmHIC_K{qU_Ol885z<@! zo$b!Q)9CEQPgMR{S};}N<}qL~hYO#7eNcT4Kk`>bEd6KlL_8wl4kc(Ct^1EG(t(Tk)kYa z3+!f$i?^V8Ibxi`h)qmFdd*LvYy@e zm?v!#)-%S(o~zd(_J)i8PmGVZpkO)TKGtq~K!i-(KQkg8k^FW}U~_{mhvFUwKVTy`4qg z$>d#3-p%COnS3V`+35Z)=03%Qn=MMVH5esU5{;EmpG1ndlvZ4oDJ}=ZRaD|{iBT4{ zE{cE@jO`4yw}<@9aeXrV1k|5CPBLU=@FH~HvT$S&UNYXa9JEGr$fbr-8F$DX%C*1a gLwAn!j|>dqiG07o7#PY9rQ6S=lDVNieDh5I7uRC@A^-pY literal 0 HcmV?d00001 diff --git a/utils/__pycache__/layers.cpython-39.pyc b/utils/__pycache__/layers.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f3852a95f5c81e30b4062203e97e5cd701e513f5 GIT binary patch literal 23244 zcmc(HeQ+Gfb>DpL><1Pg2!J3+-cf5F#Um&lMBx#c`2hnw>&UEu$-UIOUaCvMagVf zaxhU#F8NH?g4zeXJl@HN?+kf^-q2gd{60L{=MCe@aQI{xkp12WAR{4UKOm#t7$9RI zWCV}{-Z&uRA!HPggWd!n6Cq>_kVD>KKn{nH1ArXy9s}gD5Hb$P74@PR0w$=ARq8f19Ccq6aab3n*wAignSnuAN0-uawdcv1LQ;AG$7L<mpL5kNi?Lf#L^AN8IA<=e5&wCdExfnv80^|knML=E*Aya_7 z^5y`U3n3o@8?wb@m#ROZ9(!@|uB&gK}BiZO3`#=Pa3o&_MSt@o}j(>c!=T zw_2;bvFcZA%{OYL+ZENETDcvh7aHo#lJeLb9JSn)Hq6#Ox1eT zFBTPrB(QHSEN_`=0QoI5$QFy`TB+H@Q_@-qZ5~ICWt$Wd!yE$bf%;TbPePV5C_%+9 zvUC}lmbqlS;jCNh_PVp4YK3z3hPCdlr+uq!w_MLWVzknpb;xKtE%dMDc+O#?m14T- z@Tr$NXe?TuyJ?}v>A6Kz%VS2d>kob(#{&vQ2>w$SSFw24YW?)_mtlKr5*S1RtMP~wM(?vom44Ll?kPe{;ZnKgjgvjI#-x4Bc%)BGq+97D{=M0PWAL05%7K{$& z$zWkh4H-SWAx>gvKWj(V1^W;f1~?&B4|r5Qe7>W67ktt2Y%*-inVV8ipi$w9IS5wH(so`YFST;#E@>hyy%=bdCAoXgcp%xQj{Qs3=>O<`9G?RW5 zqMC^m;h>-&iQ&7@>S?@0MufCdyx-C5gdq?)fkYfJHen8E2o$9siqTmv`PTxcT&dP@ zXWk6bUiD_R*-+c~84N9#u2+ikuzIytIT^KY*y#=rgR1I2q>JzOdPcAoWM_ z0wq_I+ETpV(N$5WEX2VO84b@BvjIK$Ru)>_@lpx1kq+G1Mjc8_M$--g*rUKY}wh1^YBQy{!QW z*AXbHIjwkF#**+u3zOZY5zeAaUtT=RhC^Nqh0TV_chVGks1NdeL3f*aWj8;}_lZu% z8giuLJ6J;6@pu$FK6tTG@>f+wW7MA|^T_)g2b11Y`kfI*n(KxJBU+Jw29N9XXS z=8>y?qQI_I>Op3CwdPk>Y85C|@=eV!DKJTL>^XosX3I6##uFIFtc!Y&;vz*IXN02w zOf%FmA1-0+*8pZ;Fo3*h`VE$W$R08dY1bs--9)l5N@Qrg7E%0D`E?VonTL$eSvMSr zW`}q1EUl?CfmwxDl5VW}E31Ap?pBaHTdOwxa$|Yr-1DgGf8sROCX@XNWn=1Wtx+!3 zn&*&--qF%R_hHgeAH$_f24`@)Wyj8edXdisu@{&Vb4_pH0?tSt5A@R<`HLSEr zt0AhxqU!JSd51JOs~>J|2r9HK7_rIO_8)BU-c#w)^A+W+0S z8gdzaX~<32~6d`-U)_sYwP^|4a zUq22^ABnG@Vn@69I_%M<;a=>iN4 z=7H|q4)c;hyJB9bOU1l=Br-2u4M2M=A>N3ptzK1YLo^93(h)6q7spYfqQX~V17Zb7 zkh#H{i_$loq9fV*N-Ik>; 
z`01sLpKa$LXf7Rm&%V!+0}>X2b|Cj&L$qvARc@?SRi#;6R3&dRuU#q*=Xh8I?wf+G z7_-2vD`FI6FP9VoNM0#jvzd^F*j&X{E1Q>TR^+9z+{A zW(4A>uOMU)j^9=q;}bZr#CGUe18?BnPFY8n4eK&7ropx+&`Ui@BLfkn5$X#4)6MGQ za--@A2h<;D!Q)7Rlq%I1D{4lU7f+Z}*M1whg}CW0b&(mfgnX%&-EZJ=^Dq*_a?Now zZ|BSrbIQ!w88c^Q%v083Ywhvgtn17{F822fC~pgFLXWkfj3T6rGjY)4=8+t0S@F2J zw!tZ3+RG%Sgt;Jlsd@{ukDJrpmi>UD`+)U4prO^nAjo43lxoEl3Usqfv5F3oi}!r3 z`7I>LG3eVHsXOL+1_3$~%zwpq+9}U!WhhWBYtwGKo5nr+u8l}1r>Yr0-O5nV?wI#5 zj9S(rM&_c~%8VIcbJou+W$`YBBg1~&x7Ty69H2-&7r*R%`x!&S+5;OY2zYMP_$VNr zixSpZ17-6|gRNXE3o#nFF9P2l+BDkx{NYWs>F;lk+%@i+cdfh5-PB!o!S(V2IuB8nlNw(IT={I|2to5|L=eL#dAAM3)v>7g%fKrEyLQhT({cBX7}s`D#vcF zE@MviJc_vv*+_m|8KdjneHkgO_ zpl@Tf?gehC>EB+d1a`IV>u%&O$(qG0m|6zr^5lS?I4V}`Y9e*@f_>9HbzxYpHBjLq zDqd--lYH_gm=usqrlpj+L4dp@BE(v|Y8&D$EzMn=O@?z|X{-#?)wnrf4Vz>5jo_|e zO}OqmYa=@aZHK!_#UZMIwb40GO!lpKvJYLoL)bclqi;(pFx7< z2hrC_7XKI@%K&+QjQ4Kt6znn?txiOBcLkH~5GEw!j9jPvI~ss9_9t0fTtM+8 z6n(A!`km7DXIN#Z-w!H6$~Y4TNjrlx5nrmv!p>Wu(!7JbtP~-i^0-dq2BTNr#7fso z%Lw!!D(A~c+QBzAJIjixpRmdT>${1ia;)UhS3o`v-rB+bCKiW^pk?uDseGLaYnp1D zBq8o%m;GB^9EsUNy8Q$jVQYq!GuMvvz%6n-6FBk>7pLHO8X0;abi&sy!O+??BM$~^ zKvoOpg(hPRKSYHt;RUQ~4H9p!h{!Jj&CM8R#BewQf&qGXMI9{C>pE8R);41wWTHNA z;T1j6K~K<^$9nZ8Dm=Gic_+v>$&0cc$9iS)&!Nh$_F~Gtg$F;*_GoG|)K!jUDniRR z6Ninv8Jv$IA{g1bN(j9Hp?(3GZV73dT!*7!r=0K?g@|4CS(FWI#2GcQv$$2PNU1{mtVpLg290=VZ4i>iEY| zJy>by{d?;5=r#n%ibw4XoUq~pCCvBnSATt)u-_45}Z1hLRHDD6B%&pwZr{#o=QddBecAmr6pPrs zzk}36km3jN;x9!ngA}~qK?~E#jSHs#-H+kGu$4IA+k*+#M;yS!ED3DpZlPiSB zd9~PlwsS|IY&8;Rs&MjZqft9uXsmFOQmRcpTX<=q@S0k!oGyrSUqTqGKw>VJu!LP8 zK^ryfQEJo+CpC>b@VScLEZm%)oa(d%m-ITQs%J3`=AK|eNfmZP&zC!eI1`)lk)cvAF zR)TR=S;W+(68nP1YmLVBAYE)${9;W7xtLIayDU|OTW2^4D$hIcx60d*lYX{|b%9B2 zMc9+CMM+fs%e!MoHK@v|^D8!t1 zgs})j1Q4M>v;YwbJ^?(6QiSos9g`VW`ZOzMPzWoPBXKBs%y=F9v2Ix0qPJx&{>TsW z_?hSCbU!bkJZ9%PaYn0kEL_I@53*+$EA`5)6?N{vp_rcEJelmNrh8WiAbR7yqNskH zO$iS)Zra_6netch{I?@?pes2mmQW~sZdce}Q9nVLxJi5ug<^o%N>0}v;0*n~!{*vp zuWls_^Z}I5;uQYn9g?{%6PI;{H%(QL4Ppj7K5y=@V#Enk1!E}<|Icb?^!AR! 
zh6FaVY7TYeSRO%8qdh>?w294TEw`OVlxUDLfJl|O=?F|BN2o8&_!c5m8Q)qOMypUi z*h`E}XgWlRO=%`tg5T+x6UL?;m4H&?f&>DdVF{>3U4q-yy!3smJ;K^MwPtp$6(t_B zR>3uiC!%UyZ$~&F?uK6RS==MO7q`zE^)MD) z*i2xmr_04P#^qud%OSiyfU=3+#AV|iPGBye%f;`<??1&fM;G3Z5tzzu&t~>H83*dJHBPq5DmH9Y#~`GIBQ@-eu)(e|VRZyOGww z(gE!3OW!wJ`(T#yox5Sqm|BCK`~7l1)VUww{rV_JZ;@=Re&rC zd{N-Y1CG2u?*aopkt^QO_u*CfK_u;DdmqW?{Htux*FUEDP;&6JP!XY*|&oFwCvwKl5t0a-G}KtR9_gBHujG#Z=M!<5;yu;&?jpi!1* z3TBCQr|3yT{nAK50kokm;Wr~%hbxQ}JZ(nC@B{-QT-1!0$l!-j(y$Q>EM7#=9pP!c z7A792$V+-b@0FCzj_M~-9I^HRT-47n`TI!bQ!B-K@s_^a4s6_M)^t3CP`>csBt}M) z+%n*&9XF4EC*wS3Vrgufw@|lDX)1T@@(YVG4&joaaUjlKJJOoOv5J20o;7fHj=G&km zeI<%KCwshiND;l9OaG{q*)E7w%9z85O8YtF)mI6~Y8hk1Ok_+`A|}jN_~7T6{45iZ zw2ru`zYW+~Qi0J5!#rW(c!*e@!v_eE2!3)9 zQ3E|8T{yTffo+@c4cX@`2BHnmkv<}(uHj-fRxb&GY=zz)D1jAAz6DiaVPb2xvBb`| z`P)ows}KGt7ty)K36HST7G)1=AhkZw8UR((bMd55jNFv^>*xVo?Z1HwSH!%^Eeu|v z^iKh#hxFg$Ll;wu5|^hIu`Rozu**S)ILQ&1H#G52@|BnaTezLiaK4C;`kQK!PyY#% zf5>DTL;fyc7h|y;hH}nXBldCYv{mRSmM{bfSD!9E8N+My(?~W_!kE-O;cnwly#v`$g z5(fwvJog+#*Aky(y>R|=!lwjTL}W#5kK%^WgYHkVFz%sS85Uz4A+b;PpQE^r-0bN<7z`5=*5u`#`L3O zM|>YTTxQbIQjV#931$0+J=tj;_O#Xp*h=Uh^%bx~Mb6>_5$Es<$<(MolRu5gdS ze!Z)OeNhX$QkIxs>!PeBec2Pr=C5EQd}UT|;z;P{NPtApCxAhcymZidd_3|m*l{89 z40E$g3QUqxyb|?>8{yUtbR!WHoGnKOY0P_8D3L;O1mVVTw;I=p@pUTP7siG4*AnE; zmZfHJ39{N2Vp@dV-vfZ?g}oPc(XH!X6BvUm*o{A3|?M{Sl5FOrp`BC!Lu9y_4=igwNu* ze8z7k91JKM-Y@N#@CRRPY9HeuaigQLWy{dg4kvAKU&d|3cp?Nmz75Y^hZ3oAC#phN zN66l=4lx`b8@%>zF#S6cVt3ZP9rZ`EE{R5e)=^!|;_RjWI>SlUcy}L8-=9EXd>ab8 za)lkeA5|e-A!JXuGMvO^PlnXL2XYU=mmi9HBz!rv9g+T=dE`MQT>3Y}>fbWyr}Ta_ zMhIhPVh2LI@`)Y)LR5+HiIBbF)A<@=k#($u?%ntaNQ~Xb&tHjpr1`WRk^X$57a=3) z1Tx*qZ{6lx6G{)B1eG2FK=S#34sD>ha9$(RZ%3=XczX4&`tPqLke%3p?5;c{Q8uGW zgolLe2@glMW3wlt?LUxQyL0g;qdo~2kL*OHKOZ~eZ5mB=^D$|}u<$@bCX0n3Po4V~ zc$(U7&h)%F)n(HjlDl7wJAn~;xJM^;8Lxx+VKPy{kdwRav7ZkWV*R3?uZVO$>{9ZI85zkP*!~(lXo43KbyelKoX-}S;TIC zDXKwOM97}7XkP-AJsE%hk<{3I{Cy?rkT7Xt8y@}H)ERuI(M~s;l7o*PB%)@aLhTql z?981Zeor#loo`=_k;M2qxE;w|IY(N2IjTcAN64OVPEVb$tYF7b>0Q3koIraIHFf?5 zDY84;elhBeuuV^$LnQk1tTVL6(MUJXICa*OW|@QG%aj?S(a$0ITB311D~=l9l^5*b zFGLjxF9_KaUSzo$9$Q`g^E)PBjA63-&5t4J#R%?m<_7XxW9{?LVPlWo^gVp$AyKAZLo33?AYIl`75OMcy-v6o zl}{iSWU3*;>PLB-u47vsKF31DLgEwOWa$EvWZdMJNj{0nB5vYhDQIn^)AN}7DAYNa z=Q)d0_O_!#oN;p$W2yT;?Cm!a{3B#<`FE*5|1O}h_dZbhXK3M2g`3BK#Uf4|-+oU01}^efMU_%r z{U_dpqxJ#vefb+Wje4c|unhiIe+FOLYX*P27lSYD&R}+z{}LAuP(t>E!2^+=n7(kn z%yAMO%v0`p?WK#cuwRsKP9j~51y*wt3-EyV(Zz`&`a7t%FAEiw?lul0ghgG8k0$-b zvic?q6qqE(>+kJg=U7({_nM*qr;DMZ-Rm0&3$Wt@qF)aUUiA?GfZ&i?sySTXLdN@jv=bPY^G_L_^A`g5^| z?tO%T8S7s4E#Mt#*Z+=ue?I;;pNbz6ThD%c%)c8S_guY>^ycG3C|LG*Ai+mMCZC=k zl|P-Fs@JC$R_pQ)r=?o>)o>Rejl#j8{O9Qu_j)T7qVG7LUB)L(#WMcsRnfPSKP3%Z z`6Qt-KfH2VYA^DiK5O{wK>p8D9biHF{qpyI@*jKh?{oT(-p=vCD@-mk`8bnTnS2Tf z)}vSb2DhyTnTsL5@>!UAn?*jy zr07=(SLvanqc zUNXL{9Ja=D$fZV78F$1T$#s5(k;lgd$A(7mM1G$!hDNd@>CW@GWNu^tzdY0b2ku)F Aj{pDw literal 0 HcmV?d00001 diff --git a/utils/__pycache__/loss.cpython-38.pyc b/utils/__pycache__/loss.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92642122222f784586fa879bd4d575ba423b0135 GIT binary patch literal 4845 zcmZ`-&5s;M6|buPn2(*EnVp^eUa!}V6EPt%#0nsg$(lF`A`0dsj)F!Tn%SwI*_rL> znN;`MyH>T3u%mFX@v7>* zSFe8W)qF5NUuJmz@#jDO=w}xg`zuw>KQ1aa@Fp>cWRgd$oqzdAv<0WKh@6qzc5T^- zypiAbQLaf(`VV-!@J%KQviN|>Vkm~i`%b%rmXa)^r95jX%Q;zjz}j#`e)$V}e!bJ>#5#MZ?BKnLH)(=ME`?@I zwhCG4+Y^}Gx%C;k@ceyeR`}D-WA>OEC!8dzfDTLk{qoJP?S7DiD%tJG(IDP^D@+d3 z@nrXS9F5iPVLFJC-DsR7S0~4&oQ%ffbU%3W=G$b}M9};~&4G5Qc>W>F(v!Rd60$az ztSzMfDeFO6={zX49qCFB)^I;%E^B*`xnPzs^_$V5`f!l$zc=m=(&Rm?J+s5(ovvNc 
z&WnZdB|Fe-S4mmQHM=jS;)HRQIzyK%^AK%;r-}NKS<4tV9>*b!T~eVu?52ZpjJ3Vw za1tt2!E~k=1cP{x27wWAY!)EZvl5vi*(B(7W8-zA$$qCLjGKf}Pc30MtXv9$Zqxxw zklUKtN7j9dalM-PuK5~cBc;wqxUA>0*|rPN&Ek zrqCO!yB#HId_q=dyu!=8C^mVK-@EX9yUfP4xSGd+$3E>;+!=+~sd5mE#_}+tawP~p zKI}w!kF~pc0sLluwk_Q-icYu2o`n_kTCwc@*HBBygNvf-*_JLU&pglf?}GJjc((O# zLWuMJT{NY4#@$Y2eLA1*U#F+@cRStFDXE9bwDt5RuY)}6+}52PjpH7;*h=f*L*KeM z9DV4_TwUB!Rg7&FR2?7HB0>&0+uD9-H0s#RRHSjswR>qh)honC&aA9ytdY)~_%hmO zTUO=1{b9=*XUaX_eTx`HdflYak3gR5zAm(Kn*cR(Vo-M#khVfD3vGwaxgOMJeLjHC z%bk3YWd&Ho*M%mt4$g+2bPj~N16TLS)iH}FU9|iZEd|@+ybquUGI zu#NZB%Zt`Io-GMpcRnpiUltznU6EC@8fGZ2 zv8=A^vUK38-=t+-(e?WR-+B5%mP$4+OW8s?hxov=hHl6*9N-TWm*srliEV_Y=-hX+ zMfBDV#3Qbl?XTN*FBW<+TTCl7!&~eT|L7-Jy_qfPl5VasXt$iLWUJX4Y%;Hx-^s-*jt9q^HqIXkoBHLh}2++nSq9@kaw06Se zB3!->%)W5e-;m97{SEXt7MU)S98H#O!K!tAL2vc2PbXq#(TnMVzBp{?B5}zjxhz*6 zy4kk&WJPbUvgzUpgG^Kt!zPXrsNS9~ov`V$ES>P_ie4YCo^U#f8XBF-eVsVhBtLLT zGw8m4CS{FuAuV*xTEfXL>8iez!V34fE|45Of$=VO>s0fnV=mnnkjKSACTp%@mi7DC zfm}!Q_>cI<6b&7$FCh-JHBSzo8KCLK zj1z^iDTVRjC{&#^G-W$g5DgAOA`7M+BpM#lrqKkM z#PI~fr2UAvYf8OCgzS7W)rtFI>qWySrf@2v2RIEQt4Z69pcW|f1%<+x4lST$fx;k` z*sH`8I&pV@tP;cb+dk3_8bn_jAi9&bKz7Ib$N*Ek<91=M6JbPX_`rzC!05fN}!!!&id3pma z$t@7ZeQ}jHx$kW8EvM=ff&0sj3%Z3!(BO66KrHZ4uRGiPb9@=Ux^vB`@NJCR^0x$f z_+`9hQQ?Z5ga+C{i}A0|FkW(?k<*83(XZzM+bou61&jB{GKY?I9*V4px{EBH z_klFh1@&fSuP^%UDc=+sW(LI0%Bhb?z=8chec&MOOT$u(fIlo*Ocfvf3YZG4M(@vU zFA#i<4d-Ht8MAU$#d&lQRVwH~HcPiJdi%goT|^nEa~*Oc>J%wiu(7C?)q%GTP@t!) zL$pG}2Ii^g2F~+5Fv0~=Rwa@tdWz13X+WJO#@BQc7}P{o zEv7YDgfy5LvexC?8arT!4vTsTE%j-Y*1#$b)e(^(QN9ZBz9>sMQfAATb3rd70JjwOjKpeEzf7UK%ZTWscSH#0*y@1Gtk=d5svevnXh~`h1 z^aeECM&yDI(CQLNWYiM$Wec4<}P_B(?ZVW!tpA5v>+bQqxS ze7;dddoR8k$2KmSdHYYv)qWTw_(gemTgcliV_?hqCKBKp|J(MiYA=`h#f-q&u5?Zp htCfup#q0S7zd?SG&~>f|$tt|!l)c76W2^X${{r!tSJVIi literal 0 HcmV?d00001 diff --git a/utils/__pycache__/metrics.cpython-36.pyc b/utils/__pycache__/metrics.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8d3ac42ceca8aa69a7ee795a0b341c3df2f78f33 GIT binary patch literal 3964 zcmcgvO^6)F6|U;P`Ptdozu7-YP8$CRPh#z^EKEQak%Wz70#S@Ca2RYhyQ5C{o5=8|h}fj~|%IV2aKLLEXNmn`JqvoFc_s%Ljrieixv)H7AD zUUj`!@4b5U-s?+?i}lZc|Cb;BuEp5j*r~_F_!?UJ19XB39tg`{mwle>4ns15alCi+j71Q8beK8Y{=U<1CEQ z-C>Z)&`)=giK=wNEDq9?$Wvm^j0TrAz^_!tfI7xg>X=WcV`+8lV|Bi`dZL#k`$#&P zHkY(z+mJR*In%YfnhH@H`30 zC0>Yxjbj^n$y*h)Cyta?<4c5C zKbCicoh0u0q9&}Oo>vBz;Gk!K2q-n2 zKEqmh3#)Cib+HavYI%KOrYGP!0#*^z;sAbK;62(ceA?lAz z*!OI4EPO^{dkQ1H>M18X9t@zxRu*tO|%}-O2`izoPqaji}fk*+PDpyUJI`2zB3vo z<1BEqe&Bj{1L^ew_r%BFaC_n1Al5HScXz#5%-_4(`O}@ci|519So* z%dUG%jv?A84Kv7;y06EHC#5%e!;KFwx+6(ouU(2lH2x^=f~wq%@}7Iwi@>2PM?-h- z!o?E?r)Z$K*!QCpbRS?C`dJ{{krvnWBQH%)?R$zW^JFi`W`tVI=hGz7D5Grv=JuS$ zl+vuOm$}31A6(npdF#Xw-Pjw##3flbi~_=EX1f_=V;RqlcI#Ou>Ut?GqmNv;VeBT8 z3tcbG0(n9Daa>w&9wo!SBX`e>li&XM&p-Y3=fAjetHH3&?!6e z4uYuT+|=>5Q^U!W5%}m2<;JE+LS=_>cIi#j7pp`_UHgnO;zU(&IO#}c${EMuopGS5 zkS0w$KTvks_Y#2nfgGi(7Q}En>A@IOswl@F4^en22b=?$C|i1QFHrTF`S!g;IbMhmY^KUCNi8579#|Lz{sj>(tYZ!R3s;6*}L#;_Uo|NNiIUf!|hZU0uBPR-a;M!?q zVXvRh|F3> zqQ{&vx)+s6(i-03(}iBcOZJmMmU57c>37kwr3OD|*3A~Tc#FTxH+Ypdc-2_umbuEC zW`m#Ct;P3voqx^TG92E-%2mDtypFY4jbGD{?}MAK(>4ZFTb|SrzXX!A!pM1HB7#%S zGzNM^d6#|#@@`?}$g;?LH}PJ7ItXiO7B(zfWV}>OAG(ORGN;WkLBiO@E8JCre9tp61X}e7-WaV8OSyOpCmhZy7K1^M+p0Hr|s< zc;8-BD?=}%FcO6a_=eK~#VMg|V}-I2vyzEcNPo&n`dX6oK_aQ(msC>g(p6uIa{swZ fdZa(Ts#k_1F^+<7BjB@?!e^aV&8jp1)xYyESzy93 literal 0 HcmV?d00001 diff --git a/utils/__pycache__/metrics.cpython-37.pyc b/utils/__pycache__/metrics.cpython-37.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..4fe77dcee3bc6bea882741a016ce941f25f4511e GIT binary patch literal 3953 zcmcf^ONboDwW|6vpPilk&Hf}g>DaRHB;H-g#soIPE7?K`fozN{Fbr!pyQ5C{o5%s?)=<`xL#7ILsJIfc3;kV_VF@Y$E-z3SPWT}81-2&$Q? zS5^Jqt5>gHz4z+9>FFwg@{QVGe)RhWA%DZcNwJ`F18UZSh7(R>(xoj%i@w#eIpfx+ zq~&m%I{;no@)E!jFY^k(GOzL}fE8Zj(*Ucy!Dsp0r?fT2=lKH6)HwZ+G#8J7KiLi_ z0w@fs4fTDf**Br#6n?N)$Ihw3)yX3Ut^J6#*(y=AL574oBjn|7cYwf@&M$tB<+od% z&p*TRkF8_mdW_37skX9yx5+d;()7Wvi73JBmzQsC?%WFR?{vdN_QcMh7em{ZQJle) z6j6|^rvqJTM=}Ys3`sI%Nmz}F3c#=20)s8EXSQIS+k$Pj;GAsXi>=eW2-Jzd18Gu$ zOE#HsaLUC71u2>`^mJt`-)=e$!-<4=(l3w@6#d^wNXS^iFShgqWmYcCcO#C#cQtWJdd`Z*Et+)qy$?Hz5!T&Ek*IlV&ET%VH*WiC_W zE59z2iv%v2wJngTu}5QfV3fuh|C1rMUkCIIZJ1c14(;tPFQg(2qAcnqYtZrIcsJ;a z`{8<;bOJPLEU!StYh@(=DUeGtq>4_F5zSpil#7?&vK0uY=?hvid%>!_&_eHI$NwR}3SKcz=+NmrFVp|{9W`tju* zme1s~imBN{1`=5ahouq8=ag&Uyqf3qkY$h6976Gjdynru=B$u?g$&D_f|Luzcsy#T z24Fj=>wFPpsi^AIc%J|l!O6Pd{><5pELB%C*9lNG)PkW{`ZtQDXDFtT;;1dFCws@; ze0(=wmQyF()v~dSF=a9OgzZ?Wre<(-o#ZR|s#?Z9J*A?hD#Mytx2#sx$_O-1K|l9e zACY^HKO(%Oropc2P^VRW8CeFoS-QS9tY_1^T_1 zUs9J2W>pz*rZA8^u_-y38P0JwqP)cGd`8vI?sGUdB1?qNK5-CxIM3%$Cg?5j`Ejp- zb}@o*2D6JNeOVtajR?$L94;5|U`086fj6EYCddyuxS&eIRlYc)Z3k|{lHUN+^xp4v z)4mKnxgUD|{ZROw&^xv9H@!}DKTOQW!rVPS;gj#4G5&15>OuLi*B3#!>FrCIW}6!u zok;HY_tt}6cSHDXKa5kKM{(?L{BZ9e4CLCaPy)c4^n-DbGHhP+Zi-GHLs@3V$XceG zo_AODL9|{LNsuY?-bhkk2!HUVmmFR5dIG`KW+4jF1ihpURK;Q#_q_Xl3>?~`*Y&Qh zT{~rPh6XmycY`4o%mf)aY5FO;t`DefR(=V@YAEQr%WW@H5qO8e|jTfCS<{-`J zWYNQL9Af{vrR|Og2avN=;wT9pL|pD`C%_y<+k0>#4_CmAo~rlt^|#w6+iqH75txc4 zXtWdidtuyi?-;k+s=xsij`;YG#ljX(Bke?qeEY5D1grf?yMC%&KTX4g z10(4NhuTVGsmob^56?BzHUe#TLqE~g3~b}bNMG~^MZa9kM_r%;j6n=NHx4_%wN;Z* zXJ78d{-D>Fx|G4q1hvAVnH;2H^A*ixs%hB% zhS0*CX6ciBOl{Dsv`W8btuU9?0dt0Bu5f8E8t}S|*60N~>grK+qk}G>WtU!!T=G;FwGPcOXr9Nsf81 z6Nzj;3Jy1(=Lw$Yi;MFI9|Rs5JWMehd#pF&D3fO#6 zqseZI8Tj6u)^lB7q8o{$J$SwuhvF7cGyy|9;IqkF8F$qp%;hSWR=~?uln780k^xyBme*a literal 0 HcmV?d00001 diff --git a/utils/__pycache__/metrics.cpython-38.pyc b/utils/__pycache__/metrics.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc066b064a8549cc01cb8a26b41b82bba739f719 GIT binary patch literal 3875 zcmcgvO^h5z6|SoOnV+4V{hR%>9nm->%7FLBM7b;m+lny>QkKYBE~AZ>-mae6-tFmW zSJ$j}tm;!@$sk$s4S_h!NL+m7hJ=K~i3416LUjR&Ls*Fe=Um{u>e)ZzU?U-Frt4Kz z*L(H7SFhfy{^8_gl|cDw?ax2@ZG(`%V&{CZpmPhJtOXS(oW`U}TZ|TMt7UV>txrkI z;Wl@Gy4>X@pe0`B6`*BaFIRq{v9IRZd z%2*-fqN+hlSH`e*QiO5_Yzlg>01RyWa$wJ#%>qvy*ZyxniMb1)W?lspZu%>L64MCg zOYKrSv5TL+v%Y)*#q6u0030tj%}Y>J&rp2j0tU3ph3!HA#GN+oe+wF?E>DfSpBw<= z{Eh$kc9C2ra44)zfkF%aL9c02QNe*P^Pu({@HRuoBbI>hK<<8VDHUN5Wl=9#frcN) zJ3(I@hO24P3DEQ>{NWslViQt`X{kMBR`tiZiQhGk9x;at%l_Zq4J zvmLZ`J`Xr5syZ>&5$HSwMi;`B*}IXY>T2o+0gi^6GaL*5#WS8<%g0;! 
zqMSI9trm@6jQNV*Cv4kNH8q938zf)Km(?O_^puJ%RT%;ngP=SoU&$MP1pSR%Mto zfnooNP07i~aE7xHu1e!{N+`ED%2Z#KGCaSw4e!pf$&5$E^l>!w8ZOjLx65 zWqr6XA~1G&xLDBrCFSrr-gtsD0YB(}NtK4ne11gR4xEDpzX1;Ez1!=ieHnUkFZBGw zQ23qDJN52sUMD&X6Z5igbk9%t*?Z5peYRTlpnTNpiy++a_M}X+jkUE-B=`Eet3j{3 zCj7P^#;MPvIQG|mu)7}ya^+4af#4bXV4O~jnAg4AqSMEulbJq>mg$D)ZHYcW>t&Gy zOqusqlKMjUgKJ)LblvL-Bnqn!1)Spn#raMU zXTbL%G^0R<-0K;*UJ(0Pc5dBsY|vZ5%$M_|aZlnhK+fzrjwXd!Jzsj=+aKOq zUw!A)5beb8f{6>Zb`*!$ADiu7DElHgGuqaRcGUATu#DMq?nQ~04p!QJCPT3zf+Q)d zcNQO`zrocpR(aUrtOu*ZO4>rI%jCO;57j8q^1!NYCC?aT|Z63go7aI2M5|p zW2wtoe;4;O(>4-qcSAqX)eQXN$e=IUgQ8t7#-lFq!Hhu+EjJE3AhlJKQD;x?#Qvby zm%5a}sRXmaqM00|Ve>W3WU6g+?^gN5VO!NR9~9S($&9OL8nx<$0l+aG}v{(>n8nmorC&nsmS?v_MG7Yp_!~k7^WefXjx(}K{7it*OtbJWu zh?eJwI)~y$|u zomS~3Yl*qE4l`%zD)g%`7iPn+&cp{GEiic+6ZYxNuknHZk9CNSd_Q=W|=;7lk2NI?QsmmEb(oEiiv66z@ICmc5?VE82 z6WlU60GXJUO#&|iPl;QL1ZSwyGFc*!ZpR~y;xd2-GwCdd9f#1} zJpLz?W<^|uj<(xz1R5Wz3K;W==(9o0KDvJM+!fEC{T`e9_)pphz-rM_$~~I zyI8%C)q6O@h0meA9xrZ@w{iH}SY5nNifcFqPli`WGGT*%FIhHvE?r_*>6^^VXtKLv z9^N-6^-R~77)s)37e2I1P;meZ|Fs=cYH3W3u8-QT=lH3<0=b}knSI=A6TTTpSrX+*lT#c1BPS~h3g`joUB zZgU4{ffslYXpxtA8EA=Dcok@w*Z36B3UBZkKKm(cRrwsBhmjhmACcz58R#cFK1DJv z45$sy9eC1fP<3GbpVEvfoSi&kP_sv@%~pt_4KgI$8lf(Gr)hrAQianA z2de^BC9H5Ic~yg!E|1~uCkSZ*a2k5A3}6najx(d_^Xk6=C+436H}mSj8C(;DUjkh1 z+;U#>&gRN<_-6k{_$DB~1is1y-&dYPH}@hsbb0g=bEs}Zt}E(_@xCR1_fM4K;5h7XL|N@db$~l#rG4RpVMd-p!U|^<1}F zGJY|Z%X^=&JxkTpH1@Vhww$e~C0x-{Dt1+QSTnPh)QVajfz2sc=l(9Nw5X=Qqw4TX zsrnKbuWD7393dEy}VaE{L+9%#+;xpAw3-Y|lc2BQn-ZCM{KjtGoh z87}4YXjwUYo;RK#C*X(mud3p3g)fX~+ktbi=rzD0-8;Q*(wBiN4+Gab353@P+zao1 z+wFuWL2O>;j_!IfpS*XC+o$Ul7s^MyzVL%B_fX0t-P+jbgz~U|uFW24+BoI79ABw<}Mv33{**dLqiULgA-o)~TSWjpG6DOkpAId{X@O$KXiFO`8<6Mh`$)|o?oyOKw_X6;y~TP-!Rf4uYsvxMzZGs|=fy zhu^c`z-s_w@g&qv7|WYCnyZ?{i7vvPq&*>Zp&y6$`++V2f;92`Ks)K7mjHPfh+eA8 zK@8p|Jg@=YgLJ{~cj1Di3!p6!iFSk+cLH4*TW#Ma=@!`w>=2%W5akm5Hwlc?{5Hm^Qb+rbn<+=t2!+nzgTM3(+#~^u>f) z^OF4}5IK&o1IF~bP?6~xy=qmg2DNE}zE0<8iPmU|%~0E#qjjrBubF3#Zqf=}wU$|d z)`4@5u0y{9yub~=Iujp&Hs9oF)C&bKmAI!S{r(IZieyaD%z}UqN%DvdO$!q({xV3k znXMqLLbAIDJmzbG+aoJ;podREIgl_R6)R^1Ni%*Q#)=A(Gj|^0y`FIg6Wj_p2A!Ce zO#&~0Op#kl1UXc3g)9?Dx8spUei^_7%-R+N=A!ovxS;%>0`oy96zO5;A8))Y61*%H z=RN>22qH2>n7liVSZ_sPDlY}b3qRR1ycgYk%5Y~mGUh#Z5^!*59z}N7-BZZed6sSi#4-Ny7$A)G}d<%xeU97%?)%(a% zfKRH!9?x%)w{dtItLN{N0zF>9lfri^Aypgvd)czlbLldBonB*xqsi_HWN1$5*{&xs zltkeHe5{$EQpl-T2M+B(+)4&!LFSuH;CGS0?+<~Oe1Ugqb6+(FBcFeQ$+XO8R%yH2 Xbh#|G@!cs;Eip1O*BhNR7e)>wrCTj^t5C z9=&^XMC15UrilGi^`#%6PwfZk7wF?Y1O@u!ub`LqH+TAHH9&ygVP|)CXJ+T$&hGZY zLiL~j_~!SA7lin?IP)otKSDD%07#HbM6>K=Q;{O)E75ewB@ewz@}a2vuRtjtv?X`= z0&(=j zR$hu1qVLZ{ze4UzPURzL_nG*O_*{@j{#PQG_rX7vR5^=v#Z(>0{ad_xAisEP>f}y8 zpa47lD_8xw7(7zs4$aYfe^hItU!93u(VaCx)g2z)1TWX zfi%AY_vM1SLvQ3B&*=_bIWGgeJ14wezlMFi-0LsSV59z9wv>A_nSp;MR|U-%^8QlJ z4*hZ-5yeL2qlbF-enDw+gHrH^y0Y*t3d3Vb2oV(-$}qawbYU3QeK8 zuT7@ok@b_JuPa>vm*x{%kd&axDC&S%LOW9IQ2y=}A86}llTl{-`9ssB$ACmw#M8tbQO*h(i(GTzx|KT~9#QAoBL zwzK$I$Ud`vSEu8Vv6Xfb8WS1o49A0}-;Xo!H5cM8+!(d8o{mhfIiKlq);q!w#BfNk zE+}QU_D@C=JO5=AcYB#>ag>^ujM5l1Ax|EJfvn2 zr{h+V8q=)t=$IF3)!KvmpWQ6%+i6eY8axm|HsW2**hGmBHoudMPG=$igr=F zza{d1I2uJmItsm&dpLx$b1pqMPd)za`@Mc7%KeUy_G!MK%`a>-XU3yZs-XkwjbC>~sWtQ?nz zUg-@M*><=LlCdo)O;E z&wZxM9bD(=@i=(=2pB)N9$1$`+)sQHSfM?1nvYw(s&Dg{L!jSdaEHNN06WK#FQNkG zxSi=8UMf=Hrx?{MC07xPkC>s7jySAFf6RoxV8Bi~Jw;`-YGt%oAHKcrfy8jK3RMPb zS@{S>0M{!7Vnyj+W3BELidqj@HJa%6F;#@6SUj@>U$^Bp!459SaT4wF#ba3QKm|^) F^m-6n=>;NhTn_TKfm5N z!PsBa+4}@GKE$G4LIMnU#F|CTn*sz_-?63zHaJ+@!0)n#`xH6Zoi?WmKMzYCi-kpf ziSjZwK{->d<}<9{VPmHGLou;3hGRF2HVCZkGst@eN9E1KedKp=rL`q~T6n@9vxz%p 
zlM>idKIZoj+o$X^_9X)c-0xV%Z?P>LhtlC*myN|X-?&V(+x-58v6Wd94?NWLMzr)P#Ty2#-`Vl3SV0Oya#MmA?nN4fQ zc4os|A9c!0`oyFP&*W$ve~VIa^#mLv)eBOfK)2Vl;M(@uLzB_wZa#Kn>2oDOiNF za1Ne@=iqrb4|DJW)ZsMvHWG&9N#iQyy_5;W9w`lw^a-tPWdOj;{@ogbL*C*la ztdy~iIE^#k;<=&cUAwbsFSR>eV-exf@XEdyk{>%*?vfXtN76;#x_^4iGcN6nqeqhW zVe4n{$VORwhhJ=PQ&HNNT747hUPBnMv|{+iZ%md^z)s`V`)jv?^|e6|>qM@Nk_hQg zhml%KeW`+0JBd4CcPSn1R92%<>2@+mug?+yLgTW{iv`~xfgoxj=B3prY5S47j*UJ3 zBRjBhUA}`wY~(wbqa92bMgX6(0(*`Q(YjJ;8K%aK^12~p30Fyq$DE}g6#5dQp3Py| z1-8S#dP?E@^l$u(tw(8KW^gy_Nrd}LVn32JZDt?%y4`EVi5&P*xEbv54@}9A+r31p zf5*Lm%W~Y+G1nNKTf667VCKf@M2WBe3o6-fAL?b~ChSk?P#c!9J2DyIru|VMm9YoD z>Kj`jy8nmB3giF8Q$N6CN}Vv$q}ioYhrn2=52oU`b-3=+;mLEzZQQO*hN+Tgu_K?Q zky1PIl?s%x;^Cm_J`6RAY0iXQJl>$CdoobHW>w3f?(O!_!hQ_6S+7vl+SqT6O!b=} z?Dn*3k@cFV_Ii7^%AqvN3f(PeZL~&B??IsbRyUa#1ofWQsd|5Txr<3;_+SaM!16HC(vLL#2j+TE2Gm zN@KxzxZ^zO7?Ge4r@rnPH`u_OfbOA!)Yu6gim_#YzU8Pi3bk|*lzf9o6$u`Fik6Vl zADI%lj*l_y6bYdmDs-_xhHd#g@t-Ds%a2EIWlZtz#;ngkt!x2w@?YNhBFWQfe`?$8sFORJV~ZQQ;oHz-yw)7kLeT8%qWM zMLx%CR*iee@fttR&kGMnSHzq(&*zb6Uf5VKVmT+|$0%vv=9J(lH-CghO^|5zkWWPZ zvREnFfO+Skwg0kMIo!vWg^e!@fiH_%o>v{bL&d?HLc3dRY;D}m1m0ZN=~kbVc5ip9 zFH{rz9=${4<|AsC!O7nN&e+Xd;>;|(kFmzvy?GCN^jf-OZ1C}tq34ldk|IP>mTXky zo75%)$hU~RP2?I9QzpCTHo?VVTgz3N${pZcY&A;7s@xJE(t=Wwl2MhsL5w$v&=DPY zeqpOxVJpHz*U>x3q9}5hD9ki3Tr`A-2QSbJ3qpQ^a}6g4Y8>>bbR^%yq1-6><{9 literal 0 HcmV?d00001 diff --git a/utils/__pycache__/parse_config.cpython-38.pyc b/utils/__pycache__/parse_config.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5705ef1ad5b8120d3880cb2a94fa3bba8203509 GIT binary patch literal 2753 zcmbUjU2hvjaCh&+XE$-u^eZi;n6`YFk0hnEg(_4~$_q%9ph8q8m3#G^?W@ms*WGo9 ztMdyfqUD9~&V$Vx{|N014?OiRL_#ogmlQiyU%1oG?(ELa%+AN|d_6y3Vfei|_vhW+ zLyY}J$n+-=`52ozj{q3(h&76yHv|Z<9%*O77 zjZ0up_=tbQF0n7!SL`+e2i!+2<2Tqg&O>P$>#&ial^uTP?8wTju?HSXF_{>bLzKr^ zWyajK=JMb>7z$&c41$eJhd%`>H3E8d;jBI*? z-B}*FKe9H5i+B<*bID7b?T82Vj~r*;r`g9yhrTQ6rL45gcZB?0AAw6*DPwJM99O6-WrEqP!XBP0sQU>7Q4_UaS&x#IAE|3N*x&WsS#0Ex zpJf>b`B@g}EMtaQz$dIYMXqOE7*{GS!_>G@-pdm1$=3>iS{M)CL z!B79e&)9mH24)_Qw3$SB$Rze7Ny}#8p08WoW}L{rAB7KtUH*|N`Eje8NcCd1kK)!` z?YR-xn4sHxhA&j+#%V{1uV1Jsd2mY8cj`hOCSA$Rb<5aone_3v{xFcr*nMC1jIB_) z|F4i0#xGWGd z6)&6zA_^Av9+EvIwj%}YzGqYG3%A2jaNCw!{k6bxPF=jF`%C*cMR`ga`N_p z_+2BOE&_^Qk3A=sLOYi`n>2;o{nVdI49XLfQ&xjCO{5OsfV>N{@*aNqb^MI9)((RR zRP8lY3uA;UwOc_@>uQ~}6?1%Rcnb0^zzI|ZH6_5z&Gz2)dJ(v zUQJ;2gu0EmN0p1*dO+hcIC=HpjNHs6%FM!79DA$@Zf6!%ST~IgK0ZxUP8lXCq2w_H z^@@Ct#^fsbK7mUFE+a5yijdrUxH)WTxlT)YsC~nKEbqd=K&TR`_*zn}Cga)NNoHpz zwl~d2vlmj-Rzjlu185g<>5UVA2WN{Lr%H$moN|HpW|FW;)C)86^RxZz_p{%J?Uu_W z0$=mjzumo4g#3<;(T_pnHWc|L5KcG^NjlUq=moIW5``z1({`yRz= z|8teOJ2pU#k2AUQeA&<*LJ%+CyIzEWytE1CT4 z7T}Yg!R&L{Y{e&23;TrPQ^(ghGS?bZK(CcrgYhBC|Dq;RYnQ6iNM{HyXY2ix_eOT! 
ze{_Y>R*UfIAL%pNBF}LDMNaTs?vaNxeftS%Qa%G{oYcmXrl4>0wO&tvaXkc0Ct zsL8ajOP@1w=_A4~q=l3;*%XX?4Z8+kx_E!bocEe7Z6KjS)6x;YLhrPoJV3vi1=0d< zc{tUlDHUc)!N<^_fcu_pny?<-qnE2xm!xuql8*w_e!(=GuW7pdf(VR+y_A$do0wk?W6urc3NQ!KC$?n5%xp_Dz+_vRz%303I5{1Xd@~u`D$}-* zN(70v!>kqsn3Ij=1cI7~F(4Djl!5HguU=wszWfJ1ZK_`4>oRO(D-K~}apZ;qhxPcH ztGsq2ibcl_gB^d5zOM^zdr+*rwa_h16!pl^fcW0PO#RF@osTQ05MxROiGQc03Z;l zw(-oluZ_gzy5xE)*mCg<#cSDUOT^toiiUQe(dZ1vzdO2vp zQTq+m7QSrP$CT)*_I?YD>_!}R@8tR#+lO3Fk3I5(R$IviTC#rbpmkuk3{Sl%!R!3S zcB5B!)_mnQqK>RPKpGoC#OqEFcN<|WWxayj!CZh<@j^M$)+HRzF>1?IaYt$vxlp_l zX28hAgF6q65Fh*sJ>=E*e}%6#O6Mhu)xJ2s0S!gjO_EqBpC8k-c%W(LlTV{Vp}A(@hg{CRCFg<&==|IR-=AwMmB?EQ z3oXbh-L-kh0}I_y2{-Jiz>^D@M7FLi1X6Z=xp4j3V)cx6V5?ac(kup#PF&U2w!aN| z0enOHi8f<64Q&b^yvveF7${*Qqy;1uAaL#pm_Z1)rwiyeE(EicCxl`k!M}VFc;XF| z$1~A}8}+J=co%sq3?g6Ftq%8_pdMS=*zkMW*n%8TL=oD}@(OcWh=ivKC6M=ytrhrsS;vMAC9O62X8%S;f(M7a+W+9jycuK6`P-gyHz*P(RtjsPS z;Dkb)VCs?L4hrIFiF-)!TKmthbtShP;=v0E6-sms!g8ZsVKx}h0b?*Q-Xs$r!&ueI pfNBdoD(Q*$per*}HaMao)0H`hvkZ4$H}qHVE|Cb^VUFP}{tEzgd>8-# literal 0 HcmV?d00001 diff --git a/utils/__pycache__/plots.cpython-36.pyc b/utils/__pycache__/plots.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c4009b2cbeac325b3ed48191d91e40b2b75af461 GIT binary patch literal 13922 zcma)jYmi*Wb>4l=dv^ATePIC%Kmfvk#4a8pMM4k(h#)1#&=LXRfSMAunw`EoJF~Mh zgT8lxo$(#U_EIwAGHklSa#ixnKO84YQvQ*$6UBB?sz|P~W!X_&PRhCEa^yHw$);Qt zJMo_|t|;GkdUp0fmg8B>x!rxc@9pV6kMEq*xHdA9{k`A#qp$q>XDsVatOK7U%D3dEG@&FS*==1h4;>Z#`J=3IHs zw)}KsUZrol-Mu7T2PB zUM;8-xN};)po;1=N@vs=^$hMkqt2>xxSmy?QlC&4aOa$QQI*t1l+LS{)N|?*N}o{I z)uLKL=>mR(rB1C`YotkaSzSSEN!f2&rBCm2iI+=u>{Z*HF0c10;X1B~*EcIQzvRTp z)vYk}b(~(SH$#5Wmf7w;S--m7-R`ou-9_;U2c_2PVapGK*u7n=Rr%<>kBb7o!37Xy zE9YJ7?^r)!t=Y;|-n;40YS=0`>L+3?l%mFYEDk) zL3TZmlDprakFfze7;bf%e#zG3>^dQhrpz^rQ-g;updyp7?B4OeViLohFcTi^#1ux? 
zxUl)Y{T>!|$NpL8k@u!~pLwaHN6|^%jLFQf%*klZ;=}9>{^DwTv-9#P)MX+`t9SC_ zCbHkPH(J^Dv6#BuS*dobO+QY^fRY=#on{!Pb@g6FS2hE1Z+B(G@0vFb)&1tGP3~3ZzP=%SOVmpze+pIXkYj*<|9#-fMHLzT5Y3 z$BWP!xskVqo?mkNi9ve;_ma5h?l{Vx1sg_*MoN((QQL9*N!&Mm?8rOR#~$`cp-(zW zM9C<%hLIu(?y}Bdp28TVOzgBeak^P=1)WOOkKN6B3#76g=hm?S?HX3=s)811*3nJX zYxS_+!?FhJl@5A0+qF_Uwzp%u8{4fox$Uc(kJas(tH)=Qz3sZ%E_+?j{q>e@j+5+g zRh{ADWkD=w(Vn%>;!iK4w)=-z+TDBK0^u50Wgk#T;X7RP^Qaij(f1+`OX)DMRFv{-oaNjtbO5ja=gCdBsBYO&a!WBPmacA%#PeJIF_Y|C4N-WpV4WIAd zN+`FDEOl3YWsxd~7KZC#zdwdu=Ut)`B?E*2>pqx7B@A9%;4pW~I4e zZXaEZf>Eg}czCB01efrvfOE9$H2!)^hi|X!Dc*fGaJU1e^E{_xYd)luiak{cEBYEL z`jaHQt6u<#?bX<>9G%Y0;l~Z%_bf)3c{%CCgsrcmUgZzxhr7tH$iqcwqsZ%=4`p?s z%E5z>04Vo4YsQ*_4LuH?RPKz`Pee}VhDxa9yH@C^)Q(Nf-FHIl2S@GLQo^_$d(E_Z z!`75(g+wL$snBaAzy#oU>|X-4q)I=9jgQ>-ZGApWMG56XnjlsEH14LsEa<0F$E_%> zGEcGNb5=hSC1tJ|(isaiygAB{Vr**GT5}G+N#*&b9lQtEjL5Tjf{auiN9?^vWmQh) zYHq~$b9QNB1aR2Ma_uj_{^!5jON_LFa3(JaS-f@hg zEjjXt-5;P2t+lSSRC_Z{@ha9zv~O&$9NBfvhrWdGp2{uKK&`Y}{>o~5d!NnqrJ{rB(BF zfn~S)_-dTdel&vgM+`g@;E#h3w4X(6@0J8PPi5)!<7IJIj!AB@+LpjQAR?9Xe1OERArw6ybfBRoF*-_=8m-HrFCT3npJbEphllMGBT&el&8iAwY-`j z9K{ny)a0O4*a6-_%P6mr+ZY>M$JG=8DMt#!&hM$jv0Jsj|M&R$mp2Xg;IG#=L3V!* z1Z)5ZSmP4MuIki#&f=xm`{EmS-+*%Z0y^|ktQ5a|b?}(~eY2M_Emsy_Fu=u$y{5kw zf<@}J_0U)^qR@62g0J1M-H8+R78oH;wzooDdj-OWimnIk)?%ks+x<<>#MW=&GSK8> z--MX_FzRNie@TX%&;%TD5;y^@7`s4ZfJ|*&U5`^iy|&q|t2n!++nXz`t<6q1PRKE6qw5Q^#v6k9|C; z!)ArV*S3vD|{4wP&4zJpuNd2BWf`6-$z~ z^%i;zZ6U>NsIy~KVEe3)c*W7P2E{bI9jlLJ>$}_kEEIOVW4-;0FtHvqcS2bk8GU~H zgD8R4pXw)}FQJMEp8KgtFbk;Y&d7#sO``_w4GL&K3p&3|jek*8eXr#`TChNo*AG_}B3LGHORmn=HgN-z- zlc;E2>6Qw5gMC3Z(($>+aS9d~sw>d+9b&r;UeUgaT`1SseqwMqb(k*kKD@8i z8s6Isj&A&zVe*x*y<(pDMf5j2@8s>g<6+;^*zdeOHTX-Twdm-tqTh$vI=#4HvC*mR z{wRRE9zkoBu!^74_|o)<`uaB1`vHDf*yo2vf;@qH_JCJDY0#~-+hG!W0xsYVDQCmh z%c!|8VTAHvJB2TZYH5e@%=DMMex{%8=diOG>|he@89XD4oli`fT~1(BF3RtaUkXtH zPqp+n!W{Y(qTCewwU&(Ifw=v*zBg!YTSp%HHtacl39GCurQ+ z1Ly~mGrmFDIu;&_jt$yVDvgn7g&CRdPY>IrtudoAnD0z<3}yv=RC>ed&qlMD+xL~# zDhzw4`Go#lG!0XgsyPV@@5;Xf5B-L9sBQ97du@*L2K{owe*0}xrr-8YVqK>j(_GoN zBdqIu|2WoeCYl%dU5G}b1^nXC9eWyY?4OAEtKWe{FU(mjCtBz)V6IN^bI}|}sI08Y zUAF08P`PMf8oxp1qXlz+&6&3PC&St3I9B;&bOKsr@e~qMovakYustUf5O z$9RIMWc`GtZIC#z)@)ZoQOlx! z1s)2T)YM~8p?S+8?65j$?GJa=bh~9%MV_?zjFKjtG6`)A&@XogV=`H zqlqt_j*~_Y>vwoR(ZL+yFRt;Cz!jiDoQ5wh*tDFN($*?1exTprd+u|Xvl{4^S(A9q zk4sZ4i1Pt4_sUwO3h)Allwl-z=9}%HQm=~j(Rb0eoLWUxX=7!3uh1=z$fO2l1@nPv z!bDQlCVWyl_kv&oy~8)z4gF($_q!xS+=eaap2W63YP52KR?Kg1qf5{PvD~~p20N8^ zVX_&Nb4>7>#J7~=1&5$V`zYJt?w-TMPT9Vh%wySm;>-n~!k ztG|MuHNd{5Q4Nc4>DNHOh@s!nJr?u8%9YKP8r-o?Ab3{S*^hgqH%VF~4U!F#4#^`D zNA)xk3+xVb>o40;2z?zQ0EY32ls6DQ8s%yKUs`Une$+ab30J^WS! 
zZNc3GKuab50P6Q4Tew~Ix3KlQ#QF${daHoB(2tDvfghen!T)v%8_yEl$8PF z7|)L32sHc)TujF+YM;Xf-W1=9*ZvwV;$Fco7pE!(Rjn{xc!N~h*RJX`yKsNO9($^7 zk=1PAhbxt!1NYzX*d?@B608VUnJtZ(7|kcp@bI5T0QUqaz96E)z&8_8D5ng=hQ+nU1Pgz?Y87+@DT zB}u?fcl*m>O65dv+((2GfnIDaATO^>{g_e%nqW}M?f_Cu1C&s24EpDc2QFjtfP)xI z_ImUhG>r$(eH1>uc=W;=lgqu_z|T0q9lgwUanRb!bce+eaZrZUf^k&#>Soiq7>5|i za2g%W-pIMdYqjFDOP2r)uGO&ZWDVoh4<`XXR(WN37asKc_I8r2^RA`tFzyMI{1?bjmulE!uLWP5)7G> zm?=u?IG&zX3!=x&y*y&6xFZbrPk~gEY78Srb`S}Lm!5&PqgU=Bd6qHyls)!q>I5Ra z@>G~HJf$((7*i+JsT~Jb!DIKv1nnVHX0&v5q4W~%edWU5jPh7+dmqI#BDOy)0Z z%vW-rSCBuw_B16rcU*)d(U`z`eUio$Pn$%>^o2orimQa&>ZjC;I|dgg8pqU2{9Wid zt*#GBGwRdo##5SLt1+uSqh3}wC0K?UO#PU;rEX)+gH{o^SC|JdG)?WWPUVo?|;*`;aS&42)&zsFvwt-uGi1Warx znh^V5Y~SxCH*ep&wfKB5Yp&N9UwXbb6|U4&8 zl-psYx!;0VA;ZXw;F?fM79e`V*F@(vOaY%S-YVw~td9OCXhXt@ke6W*%=Pzi(f^)= zZi^Xq>t}B5{yWrme~P3+a-HNE60TzHcfO0y-P;2%<<^}+;YVK`6u$YnLEQ5{etxgz z^;48C6y zB?eb(Kpg`Q3J{cu3BS`s22wyKzyMUbgioa;Ayh#G-4Hp3z@URcNY%89QzG`j7=dhT zuNmeI`EDA4vWEcb)!3~dd0t|p{JJrg{vpX>!HElp1!Wgqg3BNlAVV78X$1by06G-x z1$!Ex;vzuBMffJfrHr}}qvml}|0%`{?FDTGUnJ1tk04lxiWxL&B0GCE#Da`2ZCE>d z_!BOI9E5OYphc0Pgna-EaiK%^EDaK#umX(BF*=IK1+ggt`DwTpNRBFwbeX>rH-hlO z^n&dX-6uIK#M4}kWoB&73Udc*05UE?3zHBSi~G>jg#%;r3Q-)nhj_0%iJ3E(7DJvi ztQ~n0duM2shGX{<6Rt$>7!8|*4L|}ybg^ra6WzzK0#T>#E)P>kt^7i8oY<}kZtoZl7iOk<))7e`q?a{b`aU!_i4SOq6x8+BW-BmY zDupls&++KLjZw89nfj&A{c-Pun*-5DoE!Ok2^|={MTDgP6Ue{=DB<{2wXIvoZ)a4! zQd`-mBc0(1O%2p^IW2;;@g%^(D(Uki=uMmH!J299BXthXbD)Yu{g9NV_0RHk1(Gu) zLriG&4#A*NK1G)P8_EA7p#z~QulwF94uUih+9{_3zp{!6$H@*7p_L%c+(AzKHN4)( zJS%n0xgt?suZD5v)$OVej~210oc={V?w3jajO0ZS9G$4{L(H-pz?*V9O7q=>5%IVklo-?wf}Rps zM$jK|697kqRMU8Ffvz#`k5Vl`?2vI}4+M)r)fiL_#FuU_;UVshBe`QYCLr3#LaLcT zCUjEG;*1AE*YKH;>8#soUYux870_~ggNgqvMo-lQ!iLAx!d`!>n@4N#p;CZezN$8{tG>%mY3Ah3v>JLFK`!EYan_dRN z+0Td=`s8%N1{W$x$HfNK#{>3RjdTX!7o056Zkvv zO5kg&G!uJmWWJE)1ot6Bi)D(luOYx{7VH2vAV(SrUPmbghzlue&H8HW=&FgqChC9! 
zf&S0zkQ&4T4v2J}kDw+%LYD3x&KVgvkmzUxa*g57#2L5M#iHPlu z*nVu%YtH7@$6I~=XdKUjjXwdzW|+=Kg!UFRFwMq8AF9L%=uU} zDh7N!8dJ}0Br)bL-Zq8Y_qaN{;UHfPjkLpOeHnfRq#qF|oWek?=NPixsfh8SX=HhE zfGC=P414OloXInHW?qj)no;o_LfreNOtSBeO{(oVhvS>ap~ve9v>vWkP@NS!lB{#P4}*lkr>`cK$z-&Xt|R`Gvg2HCp zIyj$yQMO6{G#f6XS>IrZ8Uu%3u`htnvVC%9K^M>?9^XgS_-`eYBRIQqHGIKvgV{;N zQjTNXd#~R$hZJyZpx)^&;-n*vw99dJIXxba$p2PZr2J{zNO5SI(-_>q`9oc=20Ftw z7eQe;Y++pS>+Jl1WQ*jhBwr`_6_Rg}e23&uN$B1RkQVDLYkU9|2tJ?{kJC3c+e|RO zEcX0mG|^Y)ACJHBx_8(ShXN3u%ez@Ooz7;mxojbQEIpn5q3n2iJUx;v;%_!P^Zx)G CJf+qE literal 0 HcmV?d00001 diff --git a/utils/__pycache__/plots.cpython-37.pyc b/utils/__pycache__/plots.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ba0be9250f861d0d076a0cfdb27f8d346b761ea7 GIT binary patch literal 13856 zcma)jZE#%IdEWin@5SPK0gw>6q)3{eL{OwGS)yrv*peJIG{jO&CGW_F7ke%iyIAZl z&%Gdl&0RGK7)Q;72{YAXCiw--v}xUT@}o^VZko=xiDu%Yaq50dC%w(2cJrgfOnv^+B#*>bMm zmM@P;U7=i%=V*BpPp>()F%OoNCT)%$4VC z%TMnfQ|Ub$wHcLtWR;Jr7gb)3JhIF4>eFgmO(6G-dPz;HDdbM5X;nn8lWIoI;#pEJ zt9kVdT286YsFFH`+-Y@MJ&U$y)fsgb&ok;3^(l28EoarMYC&Bv!<`;_{u zT2z;iJFl*(%jybp3(9`mTKL>PcXesOj=gHTv(4w7O1Oq+;?0dp&0lch>5cS`S-(5XA1STCK{d2Ok$1euMK!l&zdc*59#y z##*(NtGq|)&}!H!F=y>MdzMPhS+7|;TwZFaiZhlEpB}!G_y$EJp&8Dp(#quk8R8ub zanOsz@*;;kk0i3ba;9g8)(0Sk$etOb$o;f42SV9#{+4B1_Z{Wp-QGT5xz9=V$B=N7 z+Y-&uSiw(BU{Yu~UwKK(8P=P@d!1%G3>G`vA7`%B z{FcAf(bu2BQ~@XTtnOl!eV3c{AY8oGY*#DI;QHd4zjZ~|Rt_qLLl<1lCFwkpeeMNT zkO((BO@G1G<7}CbN|WbC#i{;m^C-w$ExUXCuXyWlyUhyw+dYNRHSS)%XMcd5+p&Mq zx$nJg-e+Fw=uvc%H)Am~%yTiCtN1v3jX$y4-soIEiLxvtX?0Khu!S6S?T%KqeC(cX zcb2Q$)utaOWWa(OyPakjr*-vCMOQWgkm&aEy1#AS*xv(Lg6u)|;5EnicJ7?zq+QLU z*jGrZq)dL&_>yA2fN!viB(!$zNT?F2yJtnto-1`WDK)YOb&l0@dmdW62(^(Ld8_F8 zp4&_G>l0{8qRriNlsyYtixQ2LBGsX^=k}6lH+}5Ld#aB;?2|&Dbd-paQEC+<_sAjc zlFnhBLKq8~*lBg*bhF+HI+dy)yBqZulFC+`Tf+ggYuK$T3as9&qnoPN>S4W$Z4K5c z9rSLtYYXYv-iqz**lxwiEnn4q?C!RCdYq%|ZPnFQ+1o~Xu-|gbagrUbsMFj$5#P?D zJ!_xApI$`W{vRcf-1%)J$XHc52V_$CvO+ff95O~c^t{N!RyvWZ+{f;F4z?1TBtPHU z5e8ivo*Rer+}_4FC?QdHWKZEtxZ}qyo(v}P6gXRXk3qQ$iKQC4;q$#4{NWGZSa8aT zm2kV`my?xNgqZlhw_zy}BAZt3jM-Yvt>*+uFWg9%;4pMy0uInvd>A!N|>J zOx~#k!9~m!aE+Fo#$Wf+)6L5R>3YETPj|p_UgC0W%}Ewgv8O6wMZbuG{xlP|>X(tk z_DXD5jxJ~B>FI{=dk!Pax}0=k!q!(&uJVWT!&Br}j^+Y{PvmvZg|fSl*Py}3#?zg% zW~?bF$Kw#f%AK)#iOA_(Qwf!PWQC4O?bsC0JtxF@aMX@1IgHz}S52)qtWB9(a8$CF z3cW@GL;#A%`6WvLf$N+=iHAk{tsVT`PvO5wuMhAyPp!WL$aMkiwM~{U6}7|9#~2XE`n#Cv?5$6y8b}~vpy=FP*?V^1Jf+}ej>|+pCMO)uP zeU**;P)_lbgbDsLGIBZ~f8|1Ov*bh&#yB;1*TXl#(w@DW1eEb0gq$$dNRt^<>ao*v zQOZzBC=jU1J_auB*Eq_Xv{0KnQk$3Bkzs9C71XF2d+f-_9Py)?G^O3Vnj+2&Q@bN- zx}PiT0o0&ol+VcB9W&3yxGEB1a-*| z{x2Z`MF8^ExZAO-I`yuzcro_A{MPNaAg8{B4&4+BrLSJ;PxOCobTg*r^5V<;e+lZ* z(`Fv8>92+$nR;z4H2m^orXjh~4nq*T8@4-fqTT{^#L4z%h-bGzOi|JGpxs*Rv}*g` z<`7%I%g2N!)`*jU2%y5)1^NP7YU}D+oC@l-jdoqd*;U=%SZ-}@bhhJ!JTxIfoWzgX z)PC%dWMXftS>K4gZKlaBLxag}^Ym&O^CX)-@STeDA(y}0tZZX}cunPjkC_%cL&(l{ zC3dz~Vi$EsiE~tl^Woa^h99oARqz^?5u89`xdl6G&pHKr0+d+z{_8REp-W{3^7o2cex%)_O*t+xjp{ zp!Vl_Nr*}adxGO$DiXMYRK#XvL#?J!g5X99=spV-i@J>I!FGsX)Ic3%O%Jw1?3%h9 z<_5IsTANhIC;-+1#Jys)Y;Ue@i^n-)O}rnfg@rnS?P3ekcM;; z6|F1V3kCf(c1=92t8fh<$LATxDJWk^tw7T@i0w9LMf)mtAy;Gjp@HBOUwVP$?TMPXtVpY@@WHXrQQycI1^9-0jF}-ZM}q& z`zl5#4{B5Bf`FDPII#-2D^ z-Ehp%_7mqTO;GoEZz39xlIsp&L;8+|v3Zn7jsdoS?j}*rv0OCesc2$%9P&M-Y)JSC zvqFr#6He?-MiUsD1H5<8G7*hMQ~1l#gp4NfmuC)KgLKusW`$EUmz2HkJ#qB!14>Y@ zvj>0=IA?sFyfqz8N7McKluBbHYIp2Lu~!_{OWp2_%3yb9qG@Op^ik<`r#BnTVr}13 zR;w`Vo#qU^xu^&%ovJwr3-8Lm3Jd(2^;F&DX9ji9KK*{VVZVdADbsK3XR)uvMp2|j zgnd2MJC41ZiH-^X&PSusJbv-$j$OnXd(TAt)$fC&=jW`J6V3PLu~sMerD%>LR91H7 
zHtY1?Q@Loqh~J>{(Y$G2b&6K+L^vBA$1b0Uo`Dn@d7QAVC(ixb??Bo-arL!u4#03+ zcIy~+upqMPL^#nn9-V*@0Qn?xXcSP8(&i*u=4G}?H3ld+6`eGokR^b+{s`z){0XR= zXgs4P2#2_oU=K1jG9!Qn&Qgt|2fv>VqM<~Giv&P%A=9vmMVf1N|vxxL-F^L*CKdz;25J!9 z5PMYd3#a0wk;D3ZwkJARBTU6rP6=FrG>FqM!v&U>^ODFNF4l4DkXditWeaop8c#zhYw+5N*@`x;|uU4=g zs3t5VRc*o?rCl!wCeS;4likoi$#*|uLa=Sff(CVL>!V65Cuqg|<~F(nO(d3^x5uET z@-8$M^p-&?1>l!7^G?zx($3nBGiHwg&t#FFM$0TvH*w7|XQ1JxFv}S9-jsd9o^_LG zJ!ZdPXWjFdW7b~8c>N8$Z~u84t2=Nc5S!9+75MWHk)gVSqJU!Cv0+bIJsZHs+j>5P zTR}uv37|(jIbbbs7uIP*!~{ixz>X3iN1~C0=Ky{Km3Ztz3?z0_Dk;!9v71(2KbH}Z zo!HGngg`Y&zZ}#Dm=+`Qrd8CJ3v~edl<wxAYrGK!~B=(OqWqK*W`e0!PFUZd302Mn)iqyj!rqIMvJy}-X?|JNwLlv~vf}HLL|ks;LLW`NJRJQS zm^mP}wwwu@w2{V1j)4Yi%4x)gY1 zpfh_18Wt`%XuW~HO|S$apalk}0_-ud$Y8L01N4i7rnd=J84f!~c^>hw^KyuAE~pV& zUSesP8OER`psx>RAR2~s=U|Qr_5-2@GDZz&l^VP|O+UYxN9?ZYrEWU(;Q|h~LLBV} z_^k%og1QZ$h2Rx2&g32n@34WVR(}^1uul&SoTA+TAtvCp`Yp5%Nry6kD-+WBHDsP9od%3{!_O*N^I*2~FjG88V~~R!KqN3lV8Fvn z`f8X`Igu83;gW=B7Y7QU%O`_4CbtXz@8`0601`z&5lRcwe>Z1LZjRZ_0|eqpqPqVE z^_zm2VUihK;2 z(aP+OoLzjeR(kHzMWBKgYoIYwf-&KTYk`sRl>8|eJ^ioP^Ea7%$b{se|1}c&e;)Wv zkx!cE59RGw;E)bNrKvUI45=inhmbB5KMM(CC)4Z*aY z)?bIiC>13k6kr1H*{w9n@Ho<)WE?vX6$l5anOdrs+PWTQ)rh?13XCoI96^gY@sA>> zW-)s~&57WWHqcs%eT3Hj37~0G{DFLy>z9o+s2v#3@Sj#*&UXmpaAOGc?Jtq&lZQRfDF4t~<|=q1B4U1)9N$)UFca!K*VN}kT%!bYcT8PZH?Zb@tq9+nO9m!SgBn2f{~QmIg-`Pd=>)B0 zAmN!RMKeDnCSpPfJWL@o2$M0uShV`yTg2Ri{w_(nTy0m|%M25q4M>p!t}wd<(9UCy zBWO#Uxc>|F11IijA%8O$#{V!kE?iFE#)#P7i0!74x?HmH+)=H{Z`scv2PFRiyQVe+ zAK(%=v8AXy>^rf2x0~F!b?4^dOWmw_UR`|krS4R?TyL%V+J|GMbbcea1PARxw-7Eb zed+Ck@)%gV9afqLHE<5ng-q|Q@ug&eqSt*b&RXiW8ou~^`DQtHXs7f)Mjawf1io~R zV8QI@pU7bU?$k!!tgTit`XsX(sHk*MH zfM+qpu5TYzyP+m`a$&(DZRqFb4-4m2{EKWiNAv!ou)Owh`wb6 zXvtWE-)SPKDBuz59+F$!qLLHWsObFd5MhJB=>NW})YOZyA!6Ui`fO~k8p0$35LsXL z;26CUyA=e$7g$LWHVWV$Fn2@?6rN^-eRK(4Kw<$Dr134nmwy`IpkUA2MZks&fDIR5 za}aORn?~%JN2~r*j2Ws0vN{owfQLT@)4ascsloz*w|nY~f%mA_SxT68yL_6N6!{8tczAEFGB%^_Xe= zU@T^uU090K(pCj`gVetmZh>R=P0(aD`o?N~Dsjd8!CR z56>}tBk&S3Dxv1WDS#sQ6``JvKlV_9;(2VV6I2<8-g)wq?|jE@#aP=FwiLlF95IM z=)a9owI84QwJ(0Z`{9j&n9ndmFP;G-kp3&@|6?S5NWXwPPt~?=(V$cHN^N<)j%bA^ zX{s-T%W2`K^@jm&LrI^9LAPj@2bHC@kDxdVOOY`}{fm4>fyrqmL)2$P4&k1WJ0<4+ z2a`W%LhC^PPbNcy6W2PL@Ya-5fnQm{BI9HSVa-YqXI?{u`whI($J#1&&GjMdU9X05 z=JluId;bzNU+^%dihnRs% zmtT=Eui$OQP4Npwm?DVPh!-CG4UTX~vwzdNYrRV|Az`4Y#1NAa+z9A@D6$By$TYIJ z9|l=bw|^d-x%ClbgG1+oK*nFEo!gf|u90bwIPhGHuNZ!OxIzF8;U!H&BEb&}cstNwx^@p5mRXv8}(0T?UT|sQV4%p8|F8A0n0w#|ix$WOn@>Cd474t0k1QPfQo5 z9@G)#s|Q2Ihqn+hba0;#+@Em>SuV~F`7r1@>(A{I(t!nuRJs{>Tt6q)mHq>%#*I2G zfaEOx+-~;l<(Go1(qs5zj<|ffbJ;s!f)YJ2s#FO|{ohQrZ)T*`vyFvW}6Oy0n(u1+co53fU%Gq)~v6@j;ry&0$ zvPv@!cZH1pPjoaqvPRQq;tbcq8z;WrXJ;48VMO8>i}reKKQM7JXJhk+t9kxV+^GY< zlReL1nB`=J{nBtHjv+UNaM=VLhIAIvxj2PX_5e{w4bRvqj* zcm%RN27bgBpmH_b--3;YJ1?P)D;aPF$#Y~6cLvE+PwcOxq3E3k`~srR`b;sj+8C^# zeGDML@G4k%4(ydg$P?y){#gVFaJOa0L1=_wvQ(IEWYioLo+vvqYZ_`XvkRX>FSGUa z5Vo4Ia<`Y=x{4raRvnwMY7VT<3%wi|CueSk_3{Y3WyLm3MR|1sZmk^b!;@sn`@aWU zO2Fb-YE_d^A>u)y0;DR9E`NhRc`1e}-VT%+WTsqup zEId)X(>Q*^?BqdBp|DSq*cVMkL9ySr_5K~(DL@W|lpTE9w!ls4iPf;YR&e_A-?>OFBNDmsQeU*N7SqE-S4oW&E!Cz{t1c; zBe>BAPZ4Ds?o9Lz!O*ZCV7zRbzQBR!nGEswA*j8;h9h`;O7KB{{Brw&uW`@e7Jg(< zLT}5N1;d?&uEcGHz6XZBm=hk;lC9}^lqsu8cE^XZm7DM48+b6K4|`zb${%ikMJYmR zz{kG3mg)^&0`aJTSJNL@I->vV5kRcmG6P6e6L1b^c5nj_cYG&jpgiTKF%;7ycum-g z0m@!BOyEwY0P9u>N(lUikaYq;w#b`o;C0lg8S*+p*fL}AO?Y5t5fjLPdlGw?Wyk(0 z8xrDJIKnrIM97105&qu(gRlTj$wR^nd754mUg54GPM}UhE{q`@K7y+_&q6K~!U@U+ zsKlvo5|W`Hk^yONbnE?aLY0K~b#DxT@3Cl9)b@BZrp~M=Q^1Eajj(kp0^DyD5w0Cq2xx%P5PS0dNkZ)VAH9n&y-vHwM6RUxD|e4e zKG~S>@AU!zJbXixAtFDX?|Wn}gDRlB$)W@AwD`SGkd96=DKRy7vw}qqTsY<)UZ0nK 
zm;oIL@TA+EMZuv3dx{~|kY$~fIZEx3B= z(%ImWS?!Xr;o;>HMy+gC*L~xRlIy&h?8z6(nsHnG@A&a|LmfEGk z?}pE!L{t!zR(Wf%RBx5e2G5sTZQSi^)koH?IBh7p^(ib*WU|3{&!UK* z^|a_l)B)9fF-~A;(4FAC@31JXECSD+<%C~i?a;B)J^fBax3He8C1S6TD}x=@)>yR0 zdOwSAH#hi6M*!QxGe<@D-{s^*B(c**3^v%NcSU~+@15E*6Q&)$q zssjb_ZS+Q5n@1+l7gwU|qF3~D9DATsi0h0GM>g~SvM4-a%L~kXadRQFzjN!?}MKf`(|9?;4^VxA60mQg}%-g?S-zOxg;E+QKQf9ay z$EbgvO^fKIuQ5kaf;+7^Hy~>{Ub&Z`N6;f4KLFkMZz5zcSikZ#bVICRPFgVsM+V5` z&YQR8B0?Eg1L~daMcih@jdi)LE|x$yhS zg$iSvUuEZeOg5SDmYD?GB|58ri^bn%@~2E_?+V5?+MQ=`Xj4Q6PNGYkzP8b35c#^O x`s=8o@ytJ7aWl_Bc?_<)yqk5?>1-yO%NEkp>0doUObzyUx);E*D<97+O3=*^}0kSJ}m)-@-raYgyQ?wJ9=RXpIF+qZ9b-=6OC_|7@a&reJw4SY_e|I?3u`z6Ep&%8POiQ?uO zzTOqnFoYq@hEdX2vt+7!t7NIGU9xet8cx$GIVRiKjYuz?s5 z#!nmTrm%%`FYX(4Q$*&CzO`?N=)7^m*y9vqD;4bX_`|Y;?=-$%9>mu}aJaaz*-3SC zj&`%ql8o~-yFCFC81J3wnZEHguti|bjwnh$WzBOk>LuHs;_A_}T#n8Dv4Y zV0Nv=rLdNs3eDOFW@v7FU_3Z|gB816 zbzAOEM_zdZLwWo$j$s+yr3zcGHfo-~bh**4lpEfar44sySytB$D#o3Qw&bK_8f3sh zFsF#W-D$XTiuGI|U7A5Dj15Oxz>UWrVwl|%f5{tKN6UBZ zUSwDw!Luly+546-=fH76q#hIGJ>2cvy(pgRHfG=)X=9Gt#Ly-lM1p7#TSw1*%8R`s zQ<$e>j$$ITTAeW7sI|OKx#EU)v(^GB?}VuhY(l$=^;#AX{zeVWM6Fu$Yh5g?w^8n( zb)#J^#zS)_GFW~2*}Of20_87ki|xy7z(UZ3;!4jT4D5@z`=4_fi3L&_PZ9A6APu5z}Qow zx^iUyI0X0aHojf~M3{j&gT3L(pD?&X7|aonbK%?v7Z)Qde}ZQCe0Y@$aQC&MRf??n zyB)U_Ew{>C5>s;6VkC@Kw*B_{dT6bCVWcgED@%53cdInfYRhK1v8o?Gyeb*3QCBf| zr|fx8Vl0nyG|V{ux)+W+2FoL_Nh~I~C5A63%`IsLIBEp7b*?U$mj;C=j4cdv=2_uM$#C`1gywOVp zQ8m{D>8yb}yg5jaqHk=@Shx75{`gHI$v5reJ-8;6Z0iwJPvNk~=sh9@L!YkN0pHKr z#R(0-WfLpazxd$KfB$o9KYmrKaV4|V5ig;k7{Rcn+{zk2` zNqb+ShH>A5jC^2~66@`je;ek@l{e6E@G(Avk5H0VK#IvQBHP<7A&CM|F1M<#%&=@% z?ktB1=~jHyLA}S`czxy7)$7;g89q%_UH>h2$G=Sl73*xPn_^O)S6yq(syv|vY?RmB zMwsD*R>u>t_;B>0_TpYCarh$?)E+?)gbj7hJ1c;S#@^X|2j2+q_RW42aL0j4viw**PO%WN z`&Q4!U4o`VfK^5EKJe?X#uCo7f!fqVwP{s5F|JLDjF=SJ`Cp$fpQRf;azF_RPssxP(za2XJC&1DOFwwPj@^jCr+cvt1Kma$UBYtF7%O;9EppBym(2 z#gE#SZs?G0LT9H@YlhA)X>>>PVRTntovOq*(S{4`C&IMP>901*yOCCq%tXSQ+^38 zju=Kt9CeQ3*55|~hS2cMI)Ja7)6k~x>>E939PlD|m1_FEUxCGSpui)-*i6VXK?HSQ z??s_Bq4o)!d$B-)7*M4^0~1CzjytGuP{8{+7+lmPbPGO*GDZyyLsGZkb0}b4m%`Wp zJiWBfx4YARapbg+qIE?Q`0k8UDplyI{$mMY{*d8a+90sotp$z5*mKCs11< zz8ZZf0ApVGEok>cys&V<3;hWB0nf}KpFE@CuBtcvDE0(gKo}~lO;fJm&VCU+gaZSr z_<}H(#yBwQ@pqkGqL=KYu(Jv5U=;NUjFH67N2aMKCt%WJ*x&RX`6Ux%FsdPc!%v}2 zCP>Y&O=@_TR(EMXy=U|${b+r{&wxA724I3EJO}u2)`1%FbW%j`+r6xx)m;6)^SW_f0?7 zpAK^9n*!*!@FW*xgBkp#=w1fX_)D_{sR3QLFB|?09VuaMI`3QZy8som?rZ@F1j(7& zq--7Yj|Inu^)V4gPt@+2^?WZsu2*&aS&_i%%m&9`QP4)jH?7`WFo(H)D2!HS+&az? 
[GIT binary patch data for the committed utils/__pycache__/*.pyc compiled cache files (torch_utils.cpython-37/38/39.pyc and the preceding blob) omitted]
[The utils/autoanchor.py diff header and the opening of check_anchors() are garbled in the binary patch data above; the readable portion of the file resumes inside check_anchors():]
+        aat = (x > 1. / thr).float().sum(1).mean()  # anchors above threshold
+        bpr = (best > 1. / thr).float().mean()  # best possible recall
+        return bpr, aat
+
+    bpr, aat = metric(m.anchor_grid.clone().cpu().view(-1, 2))
+    print('anchors/target = %.2f, Best Possible Recall (BPR) = %.4f' % (aat, bpr), end='')
+    if bpr < 0.98:  # threshold to recompute
+        print('. Attempting to improve anchors, please wait...')
+        na = m.anchor_grid.numel() // 2  # number of anchors
+        new_anchors = kmean_anchors(dataset, n=na, img_size=imgsz, thr=thr, gen=1000, verbose=False)
+        new_bpr = metric(new_anchors.reshape(-1, 2))[0]
+        if new_bpr > bpr:  # replace anchors
+            new_anchors = torch.tensor(new_anchors, device=m.anchors.device).type_as(m.anchors)
+            m.anchor_grid[:] = new_anchors.clone().view_as(m.anchor_grid)  # for inference
+            m.anchors[:] = new_anchors.clone().view_as(m.anchors) / m.stride.to(m.anchors.device).view(-1, 1, 1)  # loss
+            check_anchor_order(m)
+            print('New anchors saved to model. Update model *.yaml to use these anchors in the future.')
+        else:
+            print('Original anchors better than new anchors. Proceeding with original anchors.')
+    print('')  # newline
+
+
+def kmean_anchors(path='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen=1000, verbose=True):
+    """ Creates kmeans-evolved anchors from training dataset
+
+        Arguments:
+            path: path to dataset *.yaml, or a loaded dataset
+            n: number of anchors
+            img_size: image size used for training
+            thr: anchor-label wh ratio threshold hyperparameter hyp['anchor_t'] used for training, default=4.0
+            gen: generations to evolve anchors using genetic algorithm
+            verbose: print all results
+
+        Return:
+            k: kmeans evolved anchors
+
+        Usage:
+            from utils.autoanchor import *; _ = kmean_anchors()
+    """
+    thr = 1. / thr
+
+    def metric(k, wh):  # compute metrics
+        r = wh[:, None] / k[None]
+        x = torch.min(r, 1. / r).min(2)[0]  # ratio metric
+        # x = wh_iou(wh, torch.tensor(k))  # iou metric
+        return x, x.max(1)[0]  # x, best_x
+
+    def anchor_fitness(k):  # mutation fitness
+        _, best = metric(torch.tensor(k, dtype=torch.float32), wh)
+        return (best * (best > thr).float()).mean()  # fitness
+
+    def print_results(k):
+        k = k[np.argsort(k.prod(1))]  # sort small to large
+        x, best = metric(k, wh0)
+        bpr, aat = (best > thr).float().mean(), (x > thr).float().mean() * n  # best possible recall, anch > thr
+        print('thr=%.2f: %.4f best possible recall, %.2f anchors past thr' % (thr, bpr, aat))
+        print('n=%g, img_size=%s, metric_all=%.3f/%.3f-mean/best, past_thr=%.3f-mean: ' %
+              (n, img_size, x.mean(), best.mean(), x[x > thr].mean()), end='')
+        for i, x in enumerate(k):
+            print('%i,%i' % (round(x[0]), round(x[1])), end=', ' if i < len(k) - 1 else '\n')  # use in *.cfg
+        return k
+
+    if isinstance(path, str):  # *.yaml file
+        with open(path) as f:
+            data_dict = yaml.load(f, Loader=yaml.FullLoader)  # model dict
+        from utils.datasets import LoadImagesAndLabels
+        dataset = LoadImagesAndLabels(data_dict['train'], augment=True, rect=True)
+    else:
+        dataset = path  # dataset
+
+    # Get label wh
+    shapes = img_size * dataset.shapes / dataset.shapes.max(1, keepdims=True)
+    wh0 = np.concatenate([l[:, 3:5] * s for s, l in zip(shapes, dataset.labels)])  # wh
+
+    # Filter
+    i = (wh0 < 3.0).any(1).sum()
+    if i:
+        print('WARNING: Extremely small objects found. '
+              '%g of %g labels are < 3 pixels in width or height.' % (i, len(wh0)))
+    wh = wh0[(wh0 >= 2.0).any(1)]  # filter > 2 pixels
+
+    # Kmeans calculation
+    print('Running kmeans for %g anchors on %g points...' % (n, len(wh)))
+    s = wh.std(0)  # sigmas for whitening
+    k, dist = kmeans(wh / s, n, iter=30)  # points, mean distance
+    k *= s
+    wh = torch.tensor(wh, dtype=torch.float32)  # filtered
+    wh0 = torch.tensor(wh0, dtype=torch.float32)  # unfiltered
+    k = print_results(k)
+
+    # Plot
+    # k, d = [None] * 20, [None] * 20
+    # for i in tqdm(range(1, 21)):
+    #     k[i-1], d[i-1] = kmeans(wh / s, i)  # points, mean distance
+    # fig, ax = plt.subplots(1, 2, figsize=(14, 7))
+    # ax = ax.ravel()
+    # ax[0].plot(np.arange(1, 21), np.array(d) ** 2, marker='.')
+    # fig, ax = plt.subplots(1, 2, figsize=(14, 7))  # plot wh
+    # ax[0].hist(wh[wh[:, 0]<100, 0], 400)
+    # ax[1].hist(wh[wh[:, 1]<100, 1], 400)
+    # fig.tight_layout()
+    # fig.savefig('wh.png', dpi=200)
+
+    # Evolve
+    npr = np.random
+    f, sh, mp, s = anchor_fitness(k), k.shape, 0.9, 0.1  # fitness, anchor shape, mutation prob, sigma
+    pbar = tqdm(range(gen), desc='Evolving anchors with Genetic Algorithm')  # progress bar
+    for _ in pbar:
+        v = np.ones(sh)
+        while (v == 1).all():  # mutate until a change occurs (prevent duplicates)
+            v = ((npr.random(sh) < mp) * npr.random() * npr.randn(*sh) * s + 1).clip(0.3, 3.0)
+        kg = (k.copy() * v).clip(min=2.0)
+        fg = anchor_fitness(kg)
+        if fg > f:
+            f, k = fg, kg.copy()
+            pbar.desc = 'Evolving anchors with Genetic Algorithm: fitness = %.4f' % f
+            if verbose:
+                print_results(k)
+
+    return print_results(k)
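The anchor evolution above is a plain mutate-and-keep loop: the k-means anchors are scaled by random per-element factors, and a mutation is kept only if it raises the same 1/thr ratio-metric fitness that check_anchors uses. A minimal standalone sketch of that loop (the function name and the toy inputs are hypothetical, chosen only for illustration):

    import numpy as np
    import torch

    def evolve_anchors_sketch(wh, k, thr=4.0, gen=1000, mp=0.9, sigma=0.1):
        # wh: (N, 2) array of label widths/heights; k: (n, 2) initial anchors (e.g. from k-means)
        thr = 1. / thr
        wh = torch.tensor(wh, dtype=torch.float32)

        def fitness(k):
            r = wh[:, None] / torch.tensor(k, dtype=torch.float32)[None]
            x = torch.min(r, 1. / r).min(2)[0]           # same ratio metric as kmean_anchors
            best = x.max(1)[0]
            return (best * (best > thr).float()).mean()

        f, sh, npr = fitness(k), k.shape, np.random
        for _ in range(gen):
            v = np.ones(sh)
            while (v == 1).all():                        # mutate until something actually changes
                v = ((npr.random(sh) < mp) * npr.random() * npr.randn(*sh) * sigma + 1).clip(0.3, 3.0)
            kg = (k.copy() * v).clip(min=2.0)            # mutated candidate anchors
            fg = fitness(kg)
            if fg > f:                                   # keep only improving mutations
                f, k = fg, kg.copy()
        return k

    # e.g.: evolve_anchors_sketch(np.random.rand(500, 2) * 100 + 3, np.random.rand(9, 2) * 80 + 10)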
diff --git a/utils/datasets.py b/utils/datasets.py
new file mode 100644
index 0000000..61576a5
--- /dev/null
+++ b/utils/datasets.py
@@ -0,0 +1,1297 @@
+# Dataset utils and dataloaders
+
+import glob
+import math
+import os
+import random
+import shutil
+import time
+from itertools import repeat
+from multiprocessing.pool import ThreadPool
+from pathlib import Path
+from threading import Thread
+
+import cv2
+import numpy as np
+import torch
+from PIL import Image, ExifTags
+from torch.utils.data import Dataset
+from tqdm import tqdm
+
+import pickle
+from copy import deepcopy
+from pycocotools import mask as maskUtils
+from torchvision.utils import save_image
+
+from utils.general import xyxy2xywh, xywh2xyxy
+from utils.torch_utils import torch_distributed_zero_first
+
+# Parameters
+help_url = 'https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data'
+img_formats = ['bmp', 'jpg', 'jpeg', 'png', 'tif', 'tiff', 'dng']  # acceptable image suffixes
+vid_formats = ['mov', 'avi', 'mp4', 'mpg', 'mpeg', 'm4v', 'wmv', 'mkv']  # acceptable video suffixes
+
+# Get orientation exif tag
+for orientation in ExifTags.TAGS.keys():
+    if ExifTags.TAGS[orientation] == 'Orientation':
+        break
+
+
+def get_hash(files):
+    # Returns a single hash value of a list of files
+    return sum(os.path.getsize(f) for f in files if os.path.isfile(f))
+
+
+def exif_size(img):
+    # Returns exif-corrected PIL size
+    s = img.size  # (width, height)
+    try:
+        rotation = dict(img._getexif().items())[orientation]
+        if rotation == 6:  # rotation 270
+            s = (s[1], s[0])
+        elif rotation == 8:  # rotation 90
+            s = (s[1], s[0])
+    except Exception:  # image has no usable EXIF orientation data
+        pass
+
+    return s
+
+
+def create_dataloader(path, imgsz, batch_size, stride, opt, hyp=None, augment=False, cache=False, pad=0.0, rect=False,
+                      rank=-1, world_size=1, workers=8):
+    # Make sure only the first process in DDP processes the dataset first, and the following others can use the cache
+    with torch_distributed_zero_first(rank):
+        dataset = LoadImagesAndLabels(path, imgsz, batch_size,
+                                      augment=augment,  # augment images
+                                      hyp=hyp,  # augmentation hyperparameters
+                                      rect=rect,  # rectangular training
+                                      cache_images=cache,
+                                      single_cls=opt.single_cls,
+                                      stride=int(stride),
+                                      pad=pad,
+                                      rank=rank)
+
+    batch_size = min(batch_size, len(dataset))
+    nw = min([os.cpu_count() // world_size, batch_size if batch_size > 1 else 0, workers])  # number of workers
+    sampler = torch.utils.data.distributed.DistributedSampler(dataset) if rank != -1 else None
+    dataloader = InfiniteDataLoader(dataset,
+                                    batch_size=batch_size,
+                                    num_workers=nw,
+                                    sampler=sampler,
+                                    pin_memory=True,
+                                    collate_fn=LoadImagesAndLabels.collate_fn)  # torch.utils.data.DataLoader()
+    return dataloader, dataset
+
+
+def create_dataloader9(path, imgsz, batch_size, stride, opt, hyp=None, augment=False, cache=False, pad=0.0, rect=False,
+                       rank=-1, world_size=1, workers=8):
+    # Make sure only the first process in DDP processes the dataset first, and the following others can use the cache
+    with torch_distributed_zero_first(rank):
+        dataset = LoadImagesAndLabels9(path, imgsz, batch_size,
+                                       augment=augment,  # augment images
+                                       hyp=hyp,  # augmentation hyperparameters
+                                       rect=rect,  # rectangular training
+                                       cache_images=cache,
+                                       single_cls=opt.single_cls,
+                                       stride=int(stride),
+                                       pad=pad,
+                                       rank=rank)
+
+    batch_size = min(batch_size, len(dataset))
+    nw = min([os.cpu_count() // world_size, batch_size if batch_size > 1 else 0, workers])  # number of workers
+    sampler = torch.utils.data.distributed.DistributedSampler(dataset) if rank != -1 else None
+    dataloader = InfiniteDataLoader(dataset,
+                                    batch_size=batch_size,
+                                    num_workers=nw,
+                                    sampler=sampler,
+                                    pin_memory=True,
+                                    collate_fn=LoadImagesAndLabels9.collate_fn)  # torch.utils.data.DataLoader()
+    return dataloader, dataset
+
+
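create_dataloader and create_dataloader9 differ only in the dataset class they build (LoadImagesAndLabels vs. LoadImagesAndLabels9). A minimal sketch of calling the first one outside a training script; the dataset path and hyperparameter values below are placeholders, and `opt` only needs the single attribute the function actually reads:

    from types import SimpleNamespace
    from utils.datasets import create_dataloader

    opt = SimpleNamespace(single_cls=False)          # only opt.single_cls is read here
    hyp = {'mosaic': 1.0, 'mixup': 0.0, 'degrees': 0.0, 'translate': 0.1, 'scale': 0.5, 'shear': 0.0,
           'perspective': 0.0, 'hsv_h': 0.015, 'hsv_s': 0.7, 'hsv_v': 0.4, 'flipud': 0.0, 'fliplr': 0.5}

    dataloader, dataset = create_dataloader('../coco128/images/train2017',  # placeholder path
                                            imgsz=640, batch_size=16, stride=32,
                                            opt=opt, hyp=hyp, augment=True, rect=False,
                                            rank=-1, world_size=1, workers=8)
    for imgs, targets, paths, shapes in dataloader:
        print(imgs.shape, targets.shape)             # e.g. [16, 3, 640, 640] uint8 images, [n, 6] labels
        break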
+class InfiniteDataLoader(torch.utils.data.dataloader.DataLoader):
+    """ Dataloader that reuses workers
+
+    Uses same syntax as vanilla DataLoader
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        object.__setattr__(self, 'batch_sampler',
_RepeatSampler(self.batch_sampler)) + self.iterator = super().__iter__() + + def __len__(self): + return len(self.batch_sampler.sampler) + + def __iter__(self): + for i in range(len(self)): + yield next(self.iterator) + + +class _RepeatSampler(object): + """ Sampler that repeats forever + + Args: + sampler (Sampler) + """ + + def __init__(self, sampler): + self.sampler = sampler + + def __iter__(self): + while True: + yield from iter(self.sampler) + + +class LoadImages: # for inference + def __init__(self, path, img_size=640, auto_size=32): + p = str(Path(path)) # os-agnostic + p = os.path.abspath(p) # absolute path + if '*' in p: + files = sorted(glob.glob(p, recursive=True)) # glob + elif os.path.isdir(p): + files = sorted(glob.glob(os.path.join(p, '*.*'))) # dir + elif os.path.isfile(p): + files = [p] # files + else: + raise Exception('ERROR: %s does not exist' % p) + + images = [x for x in files if x.split('.')[-1].lower() in img_formats] + videos = [x for x in files if x.split('.')[-1].lower() in vid_formats] + ni, nv = len(images), len(videos) + + self.img_size = img_size + self.auto_size = auto_size + self.files = images + videos + self.nf = ni + nv # number of files + self.video_flag = [False] * ni + [True] * nv + self.mode = 'images' + if any(videos): + self.new_video(videos[0]) # new video + else: + self.cap = None + assert self.nf > 0, 'No images or videos found in %s. Supported formats are:\nimages: %s\nvideos: %s' % \ + (p, img_formats, vid_formats) + + def __iter__(self): + self.count = 0 + return self + + def __next__(self): + if self.count == self.nf: + raise StopIteration + path = self.files[self.count] + + if self.video_flag[self.count]: + # Read video + self.mode = 'video' + ret_val, img0 = self.cap.read() + if not ret_val: + self.count += 1 + self.cap.release() + if self.count == self.nf: # last video + raise StopIteration + else: + path = self.files[self.count] + self.new_video(path) + ret_val, img0 = self.cap.read() + + self.frame += 1 + print('video %g/%g (%g/%g) %s: ' % (self.count + 1, self.nf, self.frame, self.nframes, path), end='') + + else: + # Read image + self.count += 1 + img0 = cv2.imread(path) # BGR + assert img0 is not None, 'Image Not Found ' + path + print('image %g/%g %s: ' % (self.count, self.nf, path), end='') + + # Padded resize + img = letterbox(img0, new_shape=self.img_size, auto_size=self.auto_size)[0] + + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + + return path, img, img0, self.cap + + def new_video(self, path): + self.frame = 0 + self.cap = cv2.VideoCapture(path) + self.nframes = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT)) + + def __len__(self): + return self.nf # number of files + + +class LoadWebcam: # for inference + def __init__(self, pipe='0', img_size=640): + self.img_size = img_size + + if pipe.isnumeric(): + pipe = eval(pipe) # local camera + # pipe = 'rtsp://192.168.1.64/1' # IP camera + # pipe = 'rtsp://username:password@192.168.1.64/1' # IP camera with login + # pipe = 'http://wmccpinetop.axiscam.net/mjpg/video.mjpg' # IP golf camera + + self.pipe = pipe + self.cap = cv2.VideoCapture(pipe) # video capture object + self.cap.set(cv2.CAP_PROP_BUFFERSIZE, 3) # set buffer size + + def __iter__(self): + self.count = -1 + return self + + def __next__(self): + self.count += 1 + if cv2.waitKey(1) == ord('q'): # q to quit + self.cap.release() + cv2.destroyAllWindows() + raise StopIteration + + # Read frame + if self.pipe == 0: # local camera + ret_val, img0 = 
self.cap.read() + img0 = cv2.flip(img0, 1) # flip left-right + else: # IP camera + n = 0 + while True: + n += 1 + self.cap.grab() + if n % 30 == 0: # skip frames + ret_val, img0 = self.cap.retrieve() + if ret_val: + break + + # Print + assert ret_val, 'Camera Error %s' % self.pipe + img_path = 'webcam.jpg' + print('webcam %g: ' % self.count, end='') + + # Padded resize + img = letterbox(img0, new_shape=self.img_size)[0] + + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + + return img_path, img, img0, None + + def __len__(self): + return 0 + + +class LoadStreams: # multiple IP or RTSP cameras + def __init__(self, sources='streams.txt', img_size=640): + self.mode = 'images' + self.img_size = img_size + + if os.path.isfile(sources): + with open(sources, 'r') as f: + sources = [x.strip() for x in f.read().splitlines() if len(x.strip())] + else: + sources = [sources] + + n = len(sources) + self.imgs = [None] * n + self.sources = sources + for i, s in enumerate(sources): + # Start the thread to read frames from the video stream + print('%g/%g: %s... ' % (i + 1, n, s), end='') + cap = cv2.VideoCapture(eval(s) if s.isnumeric() else s) + assert cap.isOpened(), 'Failed to open %s' % s + w = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)) + h = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) + fps = cap.get(cv2.CAP_PROP_FPS) % 100 + _, self.imgs[i] = cap.read() # guarantee first frame + thread = Thread(target=self.update, args=([i, cap]), daemon=True) + print(' success (%gx%g at %.2f FPS).' % (w, h, fps)) + thread.start() + print('') # newline + + # check for common shapes + s = np.stack([letterbox(x, new_shape=self.img_size)[0].shape for x in self.imgs], 0) # inference shapes + self.rect = np.unique(s, axis=0).shape[0] == 1 # rect inference if all shapes equal + if not self.rect: + print('WARNING: Different stream shapes detected. 
For optimal performance supply similarly-shaped streams.') + + def update(self, index, cap): + # Read next stream frame in a daemon thread + n = 0 + while cap.isOpened(): + n += 1 + # _, self.imgs[index] = cap.read() + cap.grab() + if n == 4: # read every 4th frame + _, self.imgs[index] = cap.retrieve() + n = 0 + time.sleep(0.01) # wait time + + def __iter__(self): + self.count = -1 + return self + + def __next__(self): + self.count += 1 + img0 = self.imgs.copy() + if cv2.waitKey(1) == ord('q'): # q to quit + cv2.destroyAllWindows() + raise StopIteration + + # Letterbox + img = [letterbox(x, new_shape=self.img_size, auto=self.rect)[0] for x in img0] + + # Stack + img = np.stack(img, 0) + + # Convert + img = img[:, :, :, ::-1].transpose(0, 3, 1, 2) # BGR to RGB, to bsx3x416x416 + img = np.ascontiguousarray(img) + + return self.sources, img, img0, None + + def __len__(self): + return 0 # 1E12 frames = 32 streams at 30 FPS for 30 years + + +class LoadImagesAndLabels(Dataset): # for training/testing + def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, rect=False, image_weights=False, + cache_images=False, single_cls=False, stride=32, pad=0.0, rank=-1): + self.img_size = img_size + self.augment = augment + self.hyp = hyp + self.image_weights = image_weights + self.rect = False if image_weights else rect + self.mosaic = self.augment and not self.rect # load 4 images at a time into a mosaic (only during training) + self.mosaic_border = [-img_size // 2, -img_size // 2] + self.stride = stride + + def img2label_paths(img_paths): + # Define label paths as a function of image paths + sa, sb = os.sep + 'images' + os.sep, os.sep + 'labels' + os.sep # /images/, /labels/ substrings + return [x.replace(sa, sb, 1).replace(x.split('.')[-1], 'txt') for x in img_paths] + + try: + f = [] # image files + for p in path if isinstance(path, list) else [path]: + p = Path(p) # os-agnostic + if p.is_dir(): # dir + f += glob.glob(str(p / '**' / '*.*'), recursive=True) + elif p.is_file(): # file + with open(p, 'r') as t: + t = t.read().splitlines() + parent = str(p.parent) + os.sep + f += [x.replace('./', parent) if x.startswith('./') else x for x in t] # local to global path + else: + raise Exception('%s does not exist' % p) + self.img_files = sorted([x.replace('/', os.sep) for x in f if x.split('.')[-1].lower() in img_formats]) + assert self.img_files, 'No images found' + except Exception as e: + raise Exception('Error loading data from %s: %s\nSee %s' % (path, e, help_url)) + + # Check cache + self.label_files = img2label_paths(self.img_files) # labels + cache_path = str(Path(self.label_files[0]).parent) + '.cache3' # cached labels + if os.path.isfile(cache_path): + cache = torch.load(cache_path) # load + if cache['hash'] != get_hash(self.label_files + self.img_files): # dataset changed + cache = self.cache_labels(cache_path) # re-cache + else: + cache = self.cache_labels(cache_path) # cache + + # Read cache + cache.pop('hash') # remove hash + labels, shapes = zip(*cache.values()) + self.labels = list(labels) + self.shapes = np.array(shapes, dtype=np.float64) + self.img_files = list(cache.keys()) # update + self.label_files = img2label_paths(cache.keys()) # update + + n = len(shapes) # number of images + bi = np.floor(np.arange(n) / batch_size).astype(np.int) # batch index + nb = bi[-1] + 1 # number of batches + self.batch = bi # batch index of image + self.n = n + + # Rectangular Training + if self.rect: + # Sort by aspect ratio + s = self.shapes # wh + ar = s[:, 1] / s[:, 0] # aspect 
ratio + irect = ar.argsort() + self.img_files = [self.img_files[i] for i in irect] + self.label_files = [self.label_files[i] for i in irect] + self.labels = [self.labels[i] for i in irect] + self.shapes = s[irect] # wh + ar = ar[irect] + + # Set training image shapes + shapes = [[1, 1]] * nb + for i in range(nb): + ari = ar[bi == i] + mini, maxi = ari.min(), ari.max() + if maxi < 1: + shapes[i] = [maxi, 1] + elif mini > 1: + shapes[i] = [1, 1 / mini] + + self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(np.int) * stride + + # Check labels + create_datasubset, extract_bounding_boxes, labels_loaded = False, False, False + nm, nf, ne, ns, nd = 0, 0, 0, 0, 0 # number missing, found, empty, datasubset, duplicate + pbar = enumerate(self.label_files) + if rank in [-1, 0]: + pbar = tqdm(pbar) + for i, file in pbar: + l = self.labels[i] # label + if l is not None and l.shape[0]: + assert l.shape[1] == 5, '> 5 label columns: %s' % file + assert (l >= 0).all(), 'negative labels: %s' % file + assert (l[:, 1:] <= 1).all(), 'non-normalized or out of bounds coordinate labels: %s' % file + if np.unique(l, axis=0).shape[0] < l.shape[0]: # duplicate rows + nd += 1 # print('WARNING: duplicate rows in %s' % self.label_files[i]) # duplicate rows + if single_cls: + l[:, 0] = 0 # force dataset into single-class mode + self.labels[i] = l + nf += 1 # file found + + # Create subdataset (a smaller dataset) + if create_datasubset and ns < 1E4: + if ns == 0: + create_folder(path='./datasubset') + os.makedirs('./datasubset/images') + exclude_classes = 43 + if exclude_classes not in l[:, 0]: + ns += 1 + # shutil.copy(src=self.img_files[i], dst='./datasubset/images/') # copy image + with open('./datasubset/images.txt', 'a') as f: + f.write(self.img_files[i] + '\n') + + # Extract object detection boxes for a second stage classifier + if extract_bounding_boxes: + p = Path(self.img_files[i]) + img = cv2.imread(str(p)) + h, w = img.shape[:2] + for j, x in enumerate(l): + f = '%s%sclassifier%s%g_%g_%s' % (p.parent.parent, os.sep, os.sep, x[0], j, p.name) + if not os.path.exists(Path(f).parent): + os.makedirs(Path(f).parent) # make new output folder + + b = x[1:] * [w, h, w, h] # box + b[2:] = b[2:].max() # rectangle to square + b[2:] = b[2:] * 1.3 + 30 # pad + b = xywh2xyxy(b.reshape(-1, 4)).ravel().astype(np.int) + + b[[0, 2]] = np.clip(b[[0, 2]], 0, w) # clip boxes outside of image + b[[1, 3]] = np.clip(b[[1, 3]], 0, h) + assert cv2.imwrite(f, img[b[1]:b[3], b[0]:b[2]]), 'Failure extracting classifier boxes' + else: + ne += 1 # print('empty labels for image %s' % self.img_files[i]) # file empty + # os.system("rm '%s' '%s'" % (self.img_files[i], self.label_files[i])) # remove + + if rank in [-1, 0]: + pbar.desc = 'Scanning labels %s (%g found, %g missing, %g empty, %g duplicate, for %g images)' % ( + cache_path, nf, nm, ne, nd, n) + if nf == 0: + s = 'WARNING: No labels found in %s. See %s' % (os.path.dirname(file) + os.sep, help_url) + print(s) + assert not augment, '%s. Can not train without labels.' 
% s + + # Cache images into memory for faster training (WARNING: large datasets may exceed system RAM) + self.imgs = [None] * n + if cache_images: + gb = 0 # Gigabytes of cached images + self.img_hw0, self.img_hw = [None] * n, [None] * n + results = ThreadPool(8).imap(lambda x: load_image(*x), zip(repeat(self), range(n))) # 8 threads + pbar = tqdm(enumerate(results), total=n) + for i, x in pbar: + self.imgs[i], self.img_hw0[i], self.img_hw[i] = x # img, hw_original, hw_resized = load_image(self, i) + gb += self.imgs[i].nbytes + pbar.desc = 'Caching images (%.1fGB)' % (gb / 1E9) + + def cache_labels(self, path='labels.cache3'): + # Cache dataset labels, check images and read shapes + x = {} # dict + pbar = tqdm(zip(self.img_files, self.label_files), desc='Scanning images', total=len(self.img_files)) + for (img, label) in pbar: + try: + l = [] + im = Image.open(img) + im.verify() # PIL verify + shape = exif_size(im) # image size + assert (shape[0] > 9) & (shape[1] > 9), 'image size <10 pixels' + if os.path.isfile(label): + with open(label, 'r') as f: + l = np.array([x.split() for x in f.read().splitlines()], dtype=np.float32) # labels + if len(l) == 0: + l = np.zeros((0, 5), dtype=np.float32) + x[img] = [l, shape] + except Exception as e: + print('WARNING: Ignoring corrupted image and/or label %s: %s' % (img, e)) + + x['hash'] = get_hash(self.label_files + self.img_files) + torch.save(x, path) # save for next time + return x + + def __len__(self): + return len(self.img_files) + + # def __iter__(self): + # self.count = -1 + # print('ran dataset iter') + # #self.shuffled_vector = np.random.permutation(self.nF) if self.augment else np.arange(self.nF) + # return self + + def __getitem__(self, index): + if self.image_weights: + index = self.indices[index] + + hyp = self.hyp + mosaic = self.mosaic and random.random() < hyp['mosaic'] + if mosaic: + # Load mosaic + img, labels = load_mosaic(self, index) + #img, labels = load_mosaic9(self, index) + shapes = None + + # MixUp https://arxiv.org/pdf/1710.09412.pdf + if random.random() < hyp['mixup']: + img2, labels2 = load_mosaic(self, random.randint(0, len(self.labels) - 1)) + #img2, labels2 = load_mosaic9(self, random.randint(0, len(self.labels) - 1)) + r = np.random.beta(8.0, 8.0) # mixup ratio, alpha=beta=8.0 + img = (img * r + img2 * (1 - r)).astype(np.uint8) + labels = np.concatenate((labels, labels2), 0) + + else: + # Load image + img, (h0, w0), (h, w) = load_image(self, index) + + # Letterbox + shape = self.batch_shapes[self.batch[index]] if self.rect else self.img_size # final letterboxed shape + img, ratio, pad = letterbox(img, shape, auto=False, scaleup=self.augment) + shapes = (h0, w0), ((h / h0, w / w0), pad) # for COCO mAP rescaling + + # Load labels + labels = [] + x = self.labels[index] + if x.size > 0: + # Normalized xywh to pixel xyxy format + labels = x.copy() + labels[:, 1] = ratio[0] * w * (x[:, 1] - x[:, 3] / 2) + pad[0] # pad width + labels[:, 2] = ratio[1] * h * (x[:, 2] - x[:, 4] / 2) + pad[1] # pad height + labels[:, 3] = ratio[0] * w * (x[:, 1] + x[:, 3] / 2) + pad[0] + labels[:, 4] = ratio[1] * h * (x[:, 2] + x[:, 4] / 2) + pad[1] + + if self.augment: + # Augment imagespace + if not mosaic: + img, labels = random_perspective(img, labels, + degrees=hyp['degrees'], + translate=hyp['translate'], + scale=hyp['scale'], + shear=hyp['shear'], + perspective=hyp['perspective']) + + # Augment colorspace + augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'], vgain=hyp['hsv_v']) + + # Apply cutouts + # if random.random() < 0.9: + # 
labels = cutout(img, labels) + + nL = len(labels) # number of labels + if nL: + labels[:, 1:5] = xyxy2xywh(labels[:, 1:5]) # convert xyxy to xywh + labels[:, [2, 4]] /= img.shape[0] # normalized height 0-1 + labels[:, [1, 3]] /= img.shape[1] # normalized width 0-1 + + if self.augment: + # flip up-down + if random.random() < hyp['flipud']: + img = np.flipud(img) + if nL: + labels[:, 2] = 1 - labels[:, 2] + + # flip left-right + if random.random() < hyp['fliplr']: + img = np.fliplr(img) + if nL: + labels[:, 1] = 1 - labels[:, 1] + + labels_out = torch.zeros((nL, 6)) + if nL: + labels_out[:, 1:] = torch.from_numpy(labels) + + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + + return torch.from_numpy(img), labels_out, self.img_files[index], shapes + + @staticmethod + def collate_fn(batch): + img, label, path, shapes = zip(*batch) # transposed + for i, l in enumerate(label): + l[:, 0] = i # add target image index for build_targets() + return torch.stack(img, 0), torch.cat(label, 0), path, shapes + + +class LoadImagesAndLabels9(Dataset): # for training/testing + def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, rect=False, image_weights=False, + cache_images=False, single_cls=False, stride=32, pad=0.0, rank=-1): + self.img_size = img_size + self.augment = augment + self.hyp = hyp + self.image_weights = image_weights + self.rect = False if image_weights else rect + self.mosaic = self.augment and not self.rect # load 4 images at a time into a mosaic (only during training) + self.mosaic_border = [-img_size // 2, -img_size // 2] + self.stride = stride + + def img2label_paths(img_paths): + # Define label paths as a function of image paths + sa, sb = os.sep + 'images' + os.sep, os.sep + 'labels' + os.sep # /images/, /labels/ substrings + return [x.replace(sa, sb, 1).replace(x.split('.')[-1], 'txt') for x in img_paths] + + try: + f = [] # image files + for p in path if isinstance(path, list) else [path]: + p = Path(p) # os-agnostic + if p.is_dir(): # dir + f += glob.glob(str(p / '**' / '*.*'), recursive=True) + elif p.is_file(): # file + with open(p, 'r') as t: + t = t.read().splitlines() + parent = str(p.parent) + os.sep + f += [x.replace('./', parent) if x.startswith('./') else x for x in t] # local to global path + else: + raise Exception('%s does not exist' % p) + self.img_files = sorted([x.replace('/', os.sep) for x in f if x.split('.')[-1].lower() in img_formats]) + assert self.img_files, 'No images found' + except Exception as e: + raise Exception('Error loading data from %s: %s\nSee %s' % (path, e, help_url)) + + # Check cache + self.label_files = img2label_paths(self.img_files) # labels + cache_path = str(Path(self.label_files[0]).parent) + '.cache3' # cached labels + if os.path.isfile(cache_path): + cache = torch.load(cache_path) # load + if cache['hash'] != get_hash(self.label_files + self.img_files): # dataset changed + cache = self.cache_labels(cache_path) # re-cache + else: + cache = self.cache_labels(cache_path) # cache + + # Read cache + cache.pop('hash') # remove hash + labels, shapes = zip(*cache.values()) + self.labels = list(labels) + self.shapes = np.array(shapes, dtype=np.float64) + self.img_files = list(cache.keys()) # update + self.label_files = img2label_paths(cache.keys()) # update + + n = len(shapes) # number of images + bi = np.floor(np.arange(n) / batch_size).astype(np.int) # batch index + nb = bi[-1] + 1 # number of batches + self.batch = bi # batch index of image + self.n = n + 
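+        # The rectangular-training block below groups images into batches of `batch_size` using the
+        # floor division above, sorts them by aspect ratio h/w, and gives each batch one letterboxed
+        # shape rounded up to a multiple of `stride`. E.g. with img_size=640, stride=32, pad=0 and a
+        # batch whose largest h/w ratio is 0.5, shapes[i] = [0.5, 1] and
+        # batch_shapes[i] = ceil([0.5, 1] * 640 / 32) * 32 = [320, 640] (height, width).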
+ # Rectangular Training + if self.rect: + # Sort by aspect ratio + s = self.shapes # wh + ar = s[:, 1] / s[:, 0] # aspect ratio + irect = ar.argsort() + self.img_files = [self.img_files[i] for i in irect] + self.label_files = [self.label_files[i] for i in irect] + self.labels = [self.labels[i] for i in irect] + self.shapes = s[irect] # wh + ar = ar[irect] + + # Set training image shapes + shapes = [[1, 1]] * nb + for i in range(nb): + ari = ar[bi == i] + mini, maxi = ari.min(), ari.max() + if maxi < 1: + shapes[i] = [maxi, 1] + elif mini > 1: + shapes[i] = [1, 1 / mini] + + self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(np.int) * stride + + # Check labels + create_datasubset, extract_bounding_boxes, labels_loaded = False, False, False + nm, nf, ne, ns, nd = 0, 0, 0, 0, 0 # number missing, found, empty, datasubset, duplicate + pbar = enumerate(self.label_files) + if rank in [-1, 0]: + pbar = tqdm(pbar) + for i, file in pbar: + l = self.labels[i] # label + if l is not None and l.shape[0]: + assert l.shape[1] == 5, '> 5 label columns: %s' % file + assert (l >= 0).all(), 'negative labels: %s' % file + assert (l[:, 1:] <= 1).all(), 'non-normalized or out of bounds coordinate labels: %s' % file + if np.unique(l, axis=0).shape[0] < l.shape[0]: # duplicate rows + nd += 1 # print('WARNING: duplicate rows in %s' % self.label_files[i]) # duplicate rows + if single_cls: + l[:, 0] = 0 # force dataset into single-class mode + self.labels[i] = l + nf += 1 # file found + + # Create subdataset (a smaller dataset) + if create_datasubset and ns < 1E4: + if ns == 0: + create_folder(path='./datasubset') + os.makedirs('./datasubset/images') + exclude_classes = 43 + if exclude_classes not in l[:, 0]: + ns += 1 + # shutil.copy(src=self.img_files[i], dst='./datasubset/images/') # copy image + with open('./datasubset/images.txt', 'a') as f: + f.write(self.img_files[i] + '\n') + + # Extract object detection boxes for a second stage classifier + if extract_bounding_boxes: + p = Path(self.img_files[i]) + img = cv2.imread(str(p)) + h, w = img.shape[:2] + for j, x in enumerate(l): + f = '%s%sclassifier%s%g_%g_%s' % (p.parent.parent, os.sep, os.sep, x[0], j, p.name) + if not os.path.exists(Path(f).parent): + os.makedirs(Path(f).parent) # make new output folder + + b = x[1:] * [w, h, w, h] # box + b[2:] = b[2:].max() # rectangle to square + b[2:] = b[2:] * 1.3 + 30 # pad + b = xywh2xyxy(b.reshape(-1, 4)).ravel().astype(np.int) + + b[[0, 2]] = np.clip(b[[0, 2]], 0, w) # clip boxes outside of image + b[[1, 3]] = np.clip(b[[1, 3]], 0, h) + assert cv2.imwrite(f, img[b[1]:b[3], b[0]:b[2]]), 'Failure extracting classifier boxes' + else: + ne += 1 # print('empty labels for image %s' % self.img_files[i]) # file empty + # os.system("rm '%s' '%s'" % (self.img_files[i], self.label_files[i])) # remove + + if rank in [-1, 0]: + pbar.desc = 'Scanning labels %s (%g found, %g missing, %g empty, %g duplicate, for %g images)' % ( + cache_path, nf, nm, ne, nd, n) + if nf == 0: + s = 'WARNING: No labels found in %s. See %s' % (os.path.dirname(file) + os.sep, help_url) + print(s) + assert not augment, '%s. Can not train without labels.' 
% s + + # Cache images into memory for faster training (WARNING: large datasets may exceed system RAM) + self.imgs = [None] * n + if cache_images: + gb = 0 # Gigabytes of cached images + self.img_hw0, self.img_hw = [None] * n, [None] * n + results = ThreadPool(8).imap(lambda x: load_image(*x), zip(repeat(self), range(n))) # 8 threads + pbar = tqdm(enumerate(results), total=n) + for i, x in pbar: + self.imgs[i], self.img_hw0[i], self.img_hw[i] = x # img, hw_original, hw_resized = load_image(self, i) + gb += self.imgs[i].nbytes + pbar.desc = 'Caching images (%.1fGB)' % (gb / 1E9) + + def cache_labels(self, path='labels.cache3'): + # Cache dataset labels, check images and read shapes + x = {} # dict + pbar = tqdm(zip(self.img_files, self.label_files), desc='Scanning images', total=len(self.img_files)) + for (img, label) in pbar: + try: + l = [] + im = Image.open(img) + im.verify() # PIL verify + shape = exif_size(im) # image size + assert (shape[0] > 9) & (shape[1] > 9), 'image size <10 pixels' + if os.path.isfile(label): + with open(label, 'r') as f: + l = np.array([x.split() for x in f.read().splitlines()], dtype=np.float32) # labels + if len(l) == 0: + l = np.zeros((0, 5), dtype=np.float32) + x[img] = [l, shape] + except Exception as e: + print('WARNING: Ignoring corrupted image and/or label %s: %s' % (img, e)) + + x['hash'] = get_hash(self.label_files + self.img_files) + torch.save(x, path) # save for next time + return x + + def __len__(self): + return len(self.img_files) + + # def __iter__(self): + # self.count = -1 + # print('ran dataset iter') + # #self.shuffled_vector = np.random.permutation(self.nF) if self.augment else np.arange(self.nF) + # return self + + def __getitem__(self, index): + if self.image_weights: + index = self.indices[index] + + hyp = self.hyp + mosaic = self.mosaic and random.random() < hyp['mosaic'] + if mosaic: + # Load mosaic + #img, labels = load_mosaic(self, index) + img, labels = load_mosaic9(self, index) + shapes = None + + # MixUp https://arxiv.org/pdf/1710.09412.pdf + if random.random() < hyp['mixup']: + #img2, labels2 = load_mosaic(self, random.randint(0, len(self.labels) - 1)) + img2, labels2 = load_mosaic9(self, random.randint(0, len(self.labels) - 1)) + r = np.random.beta(8.0, 8.0) # mixup ratio, alpha=beta=8.0 + img = (img * r + img2 * (1 - r)).astype(np.uint8) + labels = np.concatenate((labels, labels2), 0) + + else: + # Load image + img, (h0, w0), (h, w) = load_image(self, index) + + # Letterbox + shape = self.batch_shapes[self.batch[index]] if self.rect else self.img_size # final letterboxed shape + img, ratio, pad = letterbox(img, shape, auto=False, scaleup=self.augment) + shapes = (h0, w0), ((h / h0, w / w0), pad) # for COCO mAP rescaling + + # Load labels + labels = [] + x = self.labels[index] + if x.size > 0: + # Normalized xywh to pixel xyxy format + labels = x.copy() + labels[:, 1] = ratio[0] * w * (x[:, 1] - x[:, 3] / 2) + pad[0] # pad width + labels[:, 2] = ratio[1] * h * (x[:, 2] - x[:, 4] / 2) + pad[1] # pad height + labels[:, 3] = ratio[0] * w * (x[:, 1] + x[:, 3] / 2) + pad[0] + labels[:, 4] = ratio[1] * h * (x[:, 2] + x[:, 4] / 2) + pad[1] + + if self.augment: + # Augment imagespace + if not mosaic: + img, labels = random_perspective(img, labels, + degrees=hyp['degrees'], + translate=hyp['translate'], + scale=hyp['scale'], + shear=hyp['shear'], + perspective=hyp['perspective']) + + # Augment colorspace + augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'], vgain=hyp['hsv_v']) + + # Apply cutouts + # if random.random() < 0.9: + # 
labels = cutout(img, labels) + + nL = len(labels) # number of labels + if nL: + labels[:, 1:5] = xyxy2xywh(labels[:, 1:5]) # convert xyxy to xywh + labels[:, [2, 4]] /= img.shape[0] # normalized height 0-1 + labels[:, [1, 3]] /= img.shape[1] # normalized width 0-1 + + if self.augment: + # flip up-down + if random.random() < hyp['flipud']: + img = np.flipud(img) + if nL: + labels[:, 2] = 1 - labels[:, 2] + + # flip left-right + if random.random() < hyp['fliplr']: + img = np.fliplr(img) + if nL: + labels[:, 1] = 1 - labels[:, 1] + + labels_out = torch.zeros((nL, 6)) + if nL: + labels_out[:, 1:] = torch.from_numpy(labels) + + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + + return torch.from_numpy(img), labels_out, self.img_files[index], shapes + + @staticmethod + def collate_fn(batch): + img, label, path, shapes = zip(*batch) # transposed + for i, l in enumerate(label): + l[:, 0] = i # add target image index for build_targets() + return torch.stack(img, 0), torch.cat(label, 0), path, shapes + + +# Ancillary functions -------------------------------------------------------------------------------------------------- +def load_image(self, index): + # loads 1 image from dataset, returns img, original hw, resized hw + img = self.imgs[index] + if img is None: # not cached + path = self.img_files[index] + img = cv2.imread(path) # BGR + assert img is not None, 'Image Not Found ' + path + h0, w0 = img.shape[:2] # orig hw + r = self.img_size / max(h0, w0) # resize image to img_size + if r != 1: # always resize down, only resize up if training with augmentation + interp = cv2.INTER_AREA if r < 1 and not self.augment else cv2.INTER_LINEAR + img = cv2.resize(img, (int(w0 * r), int(h0 * r)), interpolation=interp) + return img, (h0, w0), img.shape[:2] # img, hw_original, hw_resized + else: + return self.imgs[index], self.img_hw0[index], self.img_hw[index] # img, hw_original, hw_resized + + +def augment_hsv(img, hgain=0.5, sgain=0.5, vgain=0.5): + r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains + hue, sat, val = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2HSV)) + dtype = img.dtype # uint8 + + x = np.arange(0, 256, dtype=np.int16) + lut_hue = ((x * r[0]) % 180).astype(dtype) + lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) + lut_val = np.clip(x * r[2], 0, 255).astype(dtype) + + img_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val))).astype(dtype) + cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img) # no return needed + + # Histogram equalization + # if random.random() < 0.2: + # for i in range(3): + # img[:, :, i] = cv2.equalizeHist(img[:, :, i]) + + +def load_mosaic(self, index): + # loads images in a mosaic + + labels4 = [] + s = self.img_size + yc, xc = [int(random.uniform(-x, 2 * s + x)) for x in self.mosaic_border] # mosaic center x, y + indices = [index] + [random.randint(0, len(self.labels) - 1) for _ in range(3)] # 3 additional image indices + for i, index in enumerate(indices): + # Load image + img, _, (h, w) = load_image(self, index) + + # place img in img4 + if i == 0: # top left + img4 = np.full((s * 2, s * 2, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles + x1a, y1a, x2a, y2a = max(xc - w, 0), max(yc - h, 0), xc, yc # xmin, ymin, xmax, ymax (large image) + x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h # xmin, ymin, xmax, ymax (small image) + elif i == 1: # top right + x1a, y1a, x2a, y2a = xc, max(yc - h, 0), min(xc + w, s * 2), yc + x1b, 
y1b, x2b, y2b = 0, h - (y2a - y1a), min(w, x2a - x1a), h + elif i == 2: # bottom left + x1a, y1a, x2a, y2a = max(xc - w, 0), yc, xc, min(s * 2, yc + h) + x1b, y1b, x2b, y2b = w - (x2a - x1a), 0, w, min(y2a - y1a, h) + elif i == 3: # bottom right + x1a, y1a, x2a, y2a = xc, yc, min(xc + w, s * 2), min(s * 2, yc + h) + x1b, y1b, x2b, y2b = 0, 0, min(w, x2a - x1a), min(y2a - y1a, h) + + img4[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] + padw = x1a - x1b + padh = y1a - y1b + + # Labels + x = self.labels[index] + labels = x.copy() + if x.size > 0: # Normalized xywh to pixel xyxy format + labels[:, 1] = w * (x[:, 1] - x[:, 3] / 2) + padw + labels[:, 2] = h * (x[:, 2] - x[:, 4] / 2) + padh + labels[:, 3] = w * (x[:, 1] + x[:, 3] / 2) + padw + labels[:, 4] = h * (x[:, 2] + x[:, 4] / 2) + padh + labels4.append(labels) + + # Concat/clip labels + if len(labels4): + labels4 = np.concatenate(labels4, 0) + np.clip(labels4[:, 1:], 0, 2 * s, out=labels4[:, 1:]) # use with random_perspective + # img4, labels4 = replicate(img4, labels4) # replicate + + # Augment + img4, labels4 = random_perspective(img4, labels4, + degrees=self.hyp['degrees'], + translate=self.hyp['translate'], + scale=self.hyp['scale'], + shear=self.hyp['shear'], + perspective=self.hyp['perspective'], + border=self.mosaic_border) # border to remove + + return img4, labels4 + + +def load_mosaic9(self, index): + # loads images in a 9-mosaic + + labels9 = [] + s = self.img_size + indices = [index] + [random.randint(0, len(self.labels) - 1) for _ in range(8)] # 8 additional image indices + for i, index in enumerate(indices): + # Load image + img, _, (h, w) = load_image(self, index) + + # place img in img9 + if i == 0: # center + img9 = np.full((s * 3, s * 3, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles + h0, w0 = h, w + c = s, s, s + w, s + h # xmin, ymin, xmax, ymax (base) coordinates + elif i == 1: # top + c = s, s - h, s + w, s + elif i == 2: # top right + c = s + wp, s - h, s + wp + w, s + elif i == 3: # right + c = s + w0, s, s + w0 + w, s + h + elif i == 4: # bottom right + c = s + w0, s + hp, s + w0 + w, s + hp + h + elif i == 5: # bottom + c = s + w0 - w, s + h0, s + w0, s + h0 + h + elif i == 6: # bottom left + c = s + w0 - wp - w, s + h0, s + w0 - wp, s + h0 + h + elif i == 7: # left + c = s - w, s + h0 - h, s, s + h0 + elif i == 8: # top left + c = s - w, s + h0 - hp - h, s, s + h0 - hp + + padx, pady = c[:2] + x1, y1, x2, y2 = [max(x, 0) for x in c] # allocate coords + + # Labels + x = self.labels[index] + labels = x.copy() + if x.size > 0: # Normalized xywh to pixel xyxy format + labels[:, 1] = w * (x[:, 1] - x[:, 3] / 2) + padx + labels[:, 2] = h * (x[:, 2] - x[:, 4] / 2) + pady + labels[:, 3] = w * (x[:, 1] + x[:, 3] / 2) + padx + labels[:, 4] = h * (x[:, 2] + x[:, 4] / 2) + pady + labels9.append(labels) + + # Image + img9[y1:y2, x1:x2] = img[y1 - pady:, x1 - padx:] # img9[ymin:ymax, xmin:xmax] + hp, wp = h, w # height, width previous + + # Offset + yc, xc = [int(random.uniform(0, s)) for x in self.mosaic_border] # mosaic center x, y + img9 = img9[yc:yc + 2 * s, xc:xc + 2 * s] + + # Concat/clip labels + if len(labels9): + labels9 = np.concatenate(labels9, 0) + labels9[:, [1, 3]] -= xc + labels9[:, [2, 4]] -= yc + + np.clip(labels9[:, 1:], 0, 2 * s, out=labels9[:, 1:]) # use with random_perspective + # img9, labels9 = replicate(img9, labels9) # replicate + + # Augment + img9, labels9 = random_perspective(img9, labels9, + degrees=self.hyp['degrees'], + translate=self.hyp['translate'], + 
scale=self.hyp['scale'], + shear=self.hyp['shear'], + perspective=self.hyp['perspective'], + border=self.mosaic_border) # border to remove + + return img9, labels9 + + +def replicate(img, labels): + # Replicate labels + h, w = img.shape[:2] + boxes = labels[:, 1:].astype(int) + x1, y1, x2, y2 = boxes.T + s = ((x2 - x1) + (y2 - y1)) / 2 # side length (pixels) + for i in s.argsort()[:round(s.size * 0.5)]: # smallest indices + x1b, y1b, x2b, y2b = boxes[i] + bh, bw = y2b - y1b, x2b - x1b + yc, xc = int(random.uniform(0, h - bh)), int(random.uniform(0, w - bw)) # offset x, y + x1a, y1a, x2a, y2a = [xc, yc, xc + bw, yc + bh] + img[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] + labels = np.append(labels, [[labels[i, 0], x1a, y1a, x2a, y2a]], axis=0) + + return img, labels + + +def letterbox(img, new_shape=(640, 640), color=(114, 114, 114), auto=True, scaleFill=False, scaleup=True, auto_size=32): + # Resize image to a 32-pixel-multiple rectangle https://github.com/ultralytics/yolov3/issues/232 + shape = img.shape[:2] # current shape [height, width] + if isinstance(new_shape, int): + new_shape = (new_shape, new_shape) + + # Scale ratio (new / old) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + if not scaleup: # only scale down, do not scale up (for better test mAP) + r = min(r, 1.0) + + # Compute padding + ratio = r, r # width, height ratios + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) + dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding + if auto: # minimum rectangle + dw, dh = np.mod(dw, auto_size), np.mod(dh, auto_size) # wh padding + elif scaleFill: # stretch + dw, dh = 0.0, 0.0 + new_unpad = (new_shape[1], new_shape[0]) + ratio = new_shape[1] / shape[1], new_shape[0] / shape[0] # width, height ratios + + dw /= 2 # divide padding into 2 sides + dh /= 2 + + if shape[::-1] != new_unpad: # resize + img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR) + top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) + left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) + img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border + return img, ratio, (dw, dh) + + +def random_perspective(img, targets=(), degrees=10, translate=.1, scale=.1, shear=10, perspective=0.0, border=(0, 0)): + # torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-10, 10)) + # targets = [cls, xyxy] + + height = img.shape[0] + border[0] * 2 # shape(h,w,c) + width = img.shape[1] + border[1] * 2 + + # Center + C = np.eye(3) + C[0, 2] = -img.shape[1] / 2 # x translation (pixels) + C[1, 2] = -img.shape[0] / 2 # y translation (pixels) + + # Perspective + P = np.eye(3) + P[2, 0] = random.uniform(-perspective, perspective) # x perspective (about y) + P[2, 1] = random.uniform(-perspective, perspective) # y perspective (about x) + + # Rotation and Scale + R = np.eye(3) + a = random.uniform(-degrees, degrees) + # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations + s = random.uniform(1 - scale, 1 + scale) + # s = 2 ** random.uniform(-scale, scale) + R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s) + + # Shear + S = np.eye(3) + S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # x shear (deg) + S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # y shear (deg) + + # Translation + T = np.eye(3) + T[0, 2] = random.uniform(0.5 - translate, 0.5 + translate) * width # x translation 
(pixels) + T[1, 2] = random.uniform(0.5 - translate, 0.5 + translate) * height # y translation (pixels) + + # Combined rotation matrix + M = T @ S @ R @ P @ C # order of operations (right to left) is IMPORTANT + if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed + if perspective: + img = cv2.warpPerspective(img, M, dsize=(width, height), borderValue=(114, 114, 114)) + else: # affine + img = cv2.warpAffine(img, M[:2], dsize=(width, height), borderValue=(114, 114, 114)) + + # Visualize + # import matplotlib.pyplot as plt + # ax = plt.subplots(1, 2, figsize=(12, 6))[1].ravel() + # ax[0].imshow(img[:, :, ::-1]) # base + # ax[1].imshow(img2[:, :, ::-1]) # warped + + # Transform label coordinates + n = len(targets) + if n: + # warp points + xy = np.ones((n * 4, 3)) + xy[:, :2] = targets[:, [1, 2, 3, 4, 1, 4, 3, 2]].reshape(n * 4, 2) # x1y1, x2y2, x1y2, x2y1 + xy = xy @ M.T # transform + if perspective: + xy = (xy[:, :2] / xy[:, 2:3]).reshape(n, 8) # rescale + else: # affine + xy = xy[:, :2].reshape(n, 8) + + # create new boxes + x = xy[:, [0, 2, 4, 6]] + y = xy[:, [1, 3, 5, 7]] + xy = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T + + # # apply angle-based reduction of bounding boxes + # radians = a * math.pi / 180 + # reduction = max(abs(math.sin(radians)), abs(math.cos(radians))) ** 0.5 + # x = (xy[:, 2] + xy[:, 0]) / 2 + # y = (xy[:, 3] + xy[:, 1]) / 2 + # w = (xy[:, 2] - xy[:, 0]) * reduction + # h = (xy[:, 3] - xy[:, 1]) * reduction + # xy = np.concatenate((x - w / 2, y - h / 2, x + w / 2, y + h / 2)).reshape(4, n).T + + # clip boxes + xy[:, [0, 2]] = xy[:, [0, 2]].clip(0, width) + xy[:, [1, 3]] = xy[:, [1, 3]].clip(0, height) + + # filter candidates + i = box_candidates(box1=targets[:, 1:5].T * s, box2=xy.T) + targets = targets[i] + targets[:, 1:5] = xy[i] + + return img, targets + + +def box_candidates(box1, box2, wh_thr=2, ar_thr=20, area_thr=0.1): # box1(4,n), box2(4,n) + # Compute candidate boxes: box1 before augment, box2 after augment, wh_thr (pixels), aspect_ratio_thr, area_ratio + w1, h1 = box1[2] - box1[0], box1[3] - box1[1] + w2, h2 = box2[2] - box2[0], box2[3] - box2[1] + ar = np.maximum(w2 / (h2 + 1e-16), h2 / (w2 + 1e-16)) # aspect ratio + return (w2 > wh_thr) & (h2 > wh_thr) & (w2 * h2 / (w1 * h1 + 1e-16) > area_thr) & (ar < ar_thr) # candidates + + +def cutout(image, labels): + # Applies image cutout augmentation https://arxiv.org/abs/1708.04552 + h, w = image.shape[:2] + + def bbox_ioa(box1, box2): + # Returns the intersection over box2 area given box1, box2. box1 is 4, box2 is nx4. 
boxes are x1y1x2y2 + box2 = box2.transpose() + + # Get the coordinates of bounding boxes + b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] + b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] + + # Intersection area + inter_area = (np.minimum(b1_x2, b2_x2) - np.maximum(b1_x1, b2_x1)).clip(0) * \ + (np.minimum(b1_y2, b2_y2) - np.maximum(b1_y1, b2_y1)).clip(0) + + # box2 area + box2_area = (b2_x2 - b2_x1) * (b2_y2 - b2_y1) + 1e-16 + + # Intersection over box2 area + return inter_area / box2_area + + # create random masks + scales = [0.5] * 1 + [0.25] * 2 + [0.125] * 4 + [0.0625] * 8 + [0.03125] * 16 # image size fraction + for s in scales: + mask_h = random.randint(1, int(h * s)) + mask_w = random.randint(1, int(w * s)) + + # box + xmin = max(0, random.randint(0, w) - mask_w // 2) + ymin = max(0, random.randint(0, h) - mask_h // 2) + xmax = min(w, xmin + mask_w) + ymax = min(h, ymin + mask_h) + + # apply random color mask + image[ymin:ymax, xmin:xmax] = [random.randint(64, 191) for _ in range(3)] + + # return unobscured labels + if len(labels) and s > 0.03: + box = np.array([xmin, ymin, xmax, ymax], dtype=np.float32) + ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area + labels = labels[ioa < 0.60] # remove >60% obscured labels + + return labels + + +def create_folder(path='./new'): + # Create folder + if os.path.exists(path): + shutil.rmtree(path) # delete output folder + os.makedirs(path) # make new output folder + + +def flatten_recursive(path='../coco128'): + # Flatten a recursive directory by bringing all files to top level + new_path = Path(path + '_flat') + create_folder(new_path) + for file in tqdm(glob.glob(str(Path(path)) + '/**/*.*', recursive=True)): + shutil.copyfile(file, new_path / Path(file).name) + + diff --git a/utils/general.py b/utils/general.py new file mode 100644 index 0000000..13e4e5c --- /dev/null +++ b/utils/general.py @@ -0,0 +1,449 @@ +# General utils + +import glob +import logging +import math +import os +import platform +import random +import re +import subprocess +import time +from pathlib import Path + +import cv2 +import matplotlib +import numpy as np +import torch +import yaml + +from utils.google_utils import gsutil_getsize +from utils.metrics import fitness, fitness_p, fitness_r, fitness_ap50, fitness_ap, fitness_f +from utils.torch_utils import init_torch_seeds + +# Set printoptions +torch.set_printoptions(linewidth=320, precision=5, profile='long') +np.set_printoptions(linewidth=320, formatter={'float_kind': '{:11.5g}'.format}) # format short g, %precision=5 +matplotlib.rc('font', **{'size': 11}) + +# Prevent OpenCV from multithreading (to use PyTorch DataLoader) +cv2.setNumThreads(0) + + +def set_logging(rank=-1): + logging.basicConfig( + format="%(message)s", + level=logging.INFO if rank in [-1, 0] else logging.WARN) + + +def init_seeds(seed=0): + random.seed(seed) + np.random.seed(seed) + init_torch_seeds(seed) + + +def get_latest_run(search_dir='.'): + # Return path to most recent 'last.pt' in /runs (i.e. 
to --resume from) + last_list = glob.glob(f'{search_dir}/**/last*.pt', recursive=True) + return max(last_list, key=os.path.getctime) if last_list else '' + + +def check_git_status(): + # Suggest 'git pull' if repo is out of date + if platform.system() in ['Linux', 'Darwin'] and not os.path.isfile('/.dockerenv'): + s = subprocess.check_output('if [ -d .git ]; then git fetch && git status -uno; fi', shell=True).decode('utf-8') + if 'Your branch is behind' in s: + print(s[s.find('Your branch is behind'):s.find('\n\n')] + '\n') + + +def check_img_size(img_size, s=32): + # Verify img_size is a multiple of stride s + new_size = make_divisible(img_size, int(s)) # ceil gs-multiple + if new_size != img_size: + print('WARNING: --img-size %g must be multiple of max stride %g, updating to %g' % (img_size, s, new_size)) + return new_size + + +def check_file(file): + # Search for file if not found + if os.path.isfile(file) or file == '': + return file + else: + files = glob.glob('./**/' + file, recursive=True) # find file + assert len(files), 'File Not Found: %s' % file # assert file was found + assert len(files) == 1, "Multiple files match '%s', specify exact path: %s" % (file, files) # assert unique + return files[0] # return file + + +def check_dataset(dict): + # Download dataset if not found locally + val, s = dict.get('val'), dict.get('download') + if val and len(val): + val = [Path(x).resolve() for x in (val if isinstance(val, list) else [val])] # val path + if not all(x.exists() for x in val): + print('\nWARNING: Dataset not found, nonexistent paths: %s' % [str(x) for x in val if not x.exists()]) + if s and len(s): # download script + print('Downloading %s ...' % s) + if s.startswith('http') and s.endswith('.zip'): # URL + f = Path(s).name # filename + torch.hub.download_url_to_file(s, f) + r = os.system('unzip -q %s -d ../ && rm %s' % (f, f)) # unzip + else: # bash script + r = os.system(s) + print('Dataset autodownload %s\n' % ('success' if r == 0 else 'failure')) # analyze return value + else: + raise Exception('Dataset not found.') + + +def make_divisible(x, divisor): + # Returns x evenly divisible by divisor + return math.ceil(x / divisor) * divisor + + +def labels_to_class_weights(labels, nc=80): + # Get class weights (inverse frequency) from training labels + if labels[0] is None: # no labels loaded + return torch.Tensor() + + labels = np.concatenate(labels, 0) # labels.shape = (866643, 5) for COCO + classes = labels[:, 0].astype(np.int) # labels = [class xywh] + weights = np.bincount(classes, minlength=nc) # occurrences per class + + # Prepend gridpoint count (for uCE training) + # gpi = ((320 / 32 * np.array([1, 2, 4])) ** 2 * 3).sum() # gridpoints per image + # weights = np.hstack([gpi * len(labels) - weights.sum() * 9, weights * 9]) ** 0.5 # prepend gridpoints to start + + weights[weights == 0] = 1 # replace empty bins with 1 + weights = 1 / weights # number of targets per class + weights /= weights.sum() # normalize + return torch.from_numpy(weights) + + +def labels_to_image_weights(labels, nc=80, class_weights=np.ones(80)): + # Produces image weights based on class mAPs + n = len(labels) + class_counts = np.array([np.bincount(labels[i][:, 0].astype(np.int), minlength=nc) for i in range(n)]) + image_weights = (class_weights.reshape(1, nc) * class_counts).sum(1) + # index = random.choices(range(n), weights=image_weights, k=1) # weight image sample + return image_weights + + +def coco80_to_coco91_class(): # converts 80-index (val2014) to 91-index (paper) + # 
https://tech.amikelive.com/node-718/what-object-categories-labels-are-in-coco-dataset/ + # a = np.loadtxt('data/coco.names', dtype='str', delimiter='\n') + # b = np.loadtxt('data/coco_paper.names', dtype='str', delimiter='\n') + # x1 = [list(a[i] == b).index(True) + 1 for i in range(80)] # darknet to coco + # x2 = [list(b[i] == a).index(True) if any(b[i] == a) else None for i in range(91)] # coco to darknet + x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, + 64, 65, 67, 70, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90] + return x + + +def xyxy2xywh(x): + # Convert nx4 boxes from [x1, y1, x2, y2] to [x, y, w, h] where xy1=top-left, xy2=bottom-right + y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x) + y[:, 0] = (x[:, 0] + x[:, 2]) / 2 # x center + y[:, 1] = (x[:, 1] + x[:, 3]) / 2 # y center + y[:, 2] = x[:, 2] - x[:, 0] # width + y[:, 3] = x[:, 3] - x[:, 1] # height + return y + + +def xywh2xyxy(x): + # Convert nx4 boxes from [x, y, w, h] to [x1, y1, x2, y2] where xy1=top-left, xy2=bottom-right + y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x) + y[:, 0] = x[:, 0] - x[:, 2] / 2 # top left x + y[:, 1] = x[:, 1] - x[:, 3] / 2 # top left y + y[:, 2] = x[:, 0] + x[:, 2] / 2 # bottom right x + y[:, 3] = x[:, 1] + x[:, 3] / 2 # bottom right y + return y + + +def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None): + # Rescale coords (xyxy) from img1_shape to img0_shape + if ratio_pad is None: # calculate from img0_shape + gain = min(img1_shape[0] / img0_shape[0], img1_shape[1] / img0_shape[1]) # gain = old / new + pad = (img1_shape[1] - img0_shape[1] * gain) / 2, (img1_shape[0] - img0_shape[0] * gain) / 2 # wh padding + else: + gain = ratio_pad[0][0] + pad = ratio_pad[1] + + coords[:, [0, 2]] -= pad[0] # x padding + coords[:, [1, 3]] -= pad[1] # y padding + coords[:, :4] /= gain + clip_coords(coords, img0_shape) + return coords + + +def clip_coords(boxes, img_shape): + # Clip bounding xyxy bounding boxes to image shape (height, width) + boxes[:, 0].clamp_(0, img_shape[1]) # x1 + boxes[:, 1].clamp_(0, img_shape[0]) # y1 + boxes[:, 2].clamp_(0, img_shape[1]) # x2 + boxes[:, 3].clamp_(0, img_shape[0]) # y2 + + +def bbox_iou(box1, box2, x1y1x2y2=True, GIoU=False, DIoU=False, CIoU=False, EIoU=False, ECIoU=False, eps=1e-9): + # Returns the IoU of box1 to box2. 
box1 is 4, box2 is nx4 + box2 = box2.T + + # Get the coordinates of bounding boxes + if x1y1x2y2: # x1, y1, x2, y2 = box1 + b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] + b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] + else: # transform from xywh to xyxy + b1_x1, b1_x2 = box1[0] - box1[2] / 2, box1[0] + box1[2] / 2 + b1_y1, b1_y2 = box1[1] - box1[3] / 2, box1[1] + box1[3] / 2 + b2_x1, b2_x2 = box2[0] - box2[2] / 2, box2[0] + box2[2] / 2 + b2_y1, b2_y2 = box2[1] - box2[3] / 2, box2[1] + box2[3] / 2 + + # Intersection area + inter = (torch.min(b1_x2, b2_x2) - torch.max(b1_x1, b2_x1)).clamp(0) * \ + (torch.min(b1_y2, b2_y2) - torch.max(b1_y1, b2_y1)).clamp(0) + + # Union Area + w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1 + eps + w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1 + eps + union = w1 * h1 + w2 * h2 - inter + eps + + iou = inter / union + if GIoU or DIoU or CIoU or EIoU or ECIoU: + cw = torch.max(b1_x2, b2_x2) - torch.min(b1_x1, b2_x1) # convex (smallest enclosing box) width + ch = torch.max(b1_y2, b2_y2) - torch.min(b1_y1, b2_y1) # convex height + if CIoU or DIoU or EIoU or ECIoU: # Distance or Complete IoU https://arxiv.org/abs/1911.08287v1 + c2 = cw ** 2 + ch ** 2 + eps # convex diagonal squared + rho2 = ((b2_x1 + b2_x2 - b1_x1 - b1_x2) ** 2 + + (b2_y1 + b2_y2 - b1_y1 - b1_y2) ** 2) / 4 # center distance squared + if DIoU: + return iou - rho2 / c2 # DIoU + elif CIoU: # https://github.com/Zzh-tju/DIoU-SSD-pytorch/blob/master/utils/box/box_utils.py#L47 + v = (4 / math.pi ** 2) * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) + with torch.no_grad(): + alpha = v / ((1 + eps) - iou + v) + return iou - (rho2 / c2 + v * alpha) # CIoU + elif EIoU: # Efficient IoU https://arxiv.org/abs/2101.08158 + rho3 = (w1-w2) **2 + c3 = cw ** 2 + eps + rho4 = (h1-h2) **2 + c4 = ch ** 2 + eps + return iou - rho2 / c2 - rho3 / c3 - rho4 / c4 # EIoU + elif ECIoU: + v = (4 / math.pi ** 2) * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) + with torch.no_grad(): + alpha = v / ((1 + eps) - iou + v) + rho3 = (w1-w2) **2 + c3 = cw ** 2 + eps + rho4 = (h1-h2) **2 + c4 = ch ** 2 + eps + return iou - v * alpha - rho2 / c2 - rho3 / c3 - rho4 / c4 # ECIoU + else: # GIoU https://arxiv.org/pdf/1902.09630.pdf + c_area = cw * ch + eps # convex area + return iou - (c_area - union) / c_area # GIoU + else: + return iou # IoU + + +def box_iou(box1, box2): + # https://github.com/pytorch/vision/blob/master/torchvision/ops/boxes.py + """ + Return intersection-over-union (Jaccard index) of boxes. + Both sets of boxes are expected to be in (x1, y1, x2, y2) format. + Arguments: + box1 (Tensor[N, 4]) + box2 (Tensor[M, 4]) + Returns: + iou (Tensor[N, M]): the NxM matrix containing the pairwise + IoU values for every element in boxes1 and boxes2 + """ + + def box_area(box): + # box = 4xn + return (box[2] - box[0]) * (box[3] - box[1]) + + area1 = box_area(box1.T) + area2 = box_area(box2.T) + + # inter(N,M) = (rb(N,M,2) - lt(N,M,2)).clamp(0).prod(2) + inter = (torch.min(box1[:, None, 2:], box2[:, 2:]) - torch.max(box1[:, None, :2], box2[:, :2])).clamp(0).prod(2) + return inter / (area1[:, None] + area2 - inter) # iou = inter / (area1 + area2 - inter) + + +def wh_iou(wh1, wh2): + # Returns the nxm IoU matrix. 
wh1 is nx2, wh2 is mx2 + wh1 = wh1[:, None] # [N,1,2] + wh2 = wh2[None] # [1,M,2] + inter = torch.min(wh1, wh2).prod(2) # [N,M] + return inter / (wh1.prod(2) + wh2.prod(2) - inter) # iou = inter / (area1 + area2 - inter) + + +def non_max_suppression(prediction, conf_thres=0.1, iou_thres=0.6, merge=False, classes=None, agnostic=False): + """Performs Non-Maximum Suppression (NMS) on inference results + + Returns: + detections with shape: nx6 (x1, y1, x2, y2, conf, cls) + """ + + nc = prediction[0].shape[1] - 5 # number of classes + xc = prediction[..., 4] > conf_thres # candidates + + # Settings + min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height + max_det = 300 # maximum number of detections per image + time_limit = 10.0 # seconds to quit after + redundant = True # require redundant detections + multi_label = nc > 1 # multiple labels per box (adds 0.5ms/img) + + t = time.time() + output = [torch.zeros(0, 6)] * prediction.shape[0] + for xi, x in enumerate(prediction): # image index, image inference + # Apply constraints + # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height + x = x[xc[xi]] # confidence + + # If none remain process next image + if not x.shape[0]: + continue + + # Compute conf + x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf + + # Box (center x, center y, width, height) to (x1, y1, x2, y2) + box = xywh2xyxy(x[:, :4]) + + # Detections matrix nx6 (xyxy, conf, cls) + if multi_label: + i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T + x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1) + else: # best class only + conf, j = x[:, 5:].max(1, keepdim=True) + x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres] + + # Filter by class + if classes: + x = x[(x[:, 5:6] == torch.tensor(classes, device=x.device)).any(1)] + + # Apply finite constraint + # if not torch.isfinite(x).all(): + # x = x[torch.isfinite(x).all(1)] + + # If none remain process next image + n = x.shape[0] # number of boxes + if not n: + continue + + # Sort by confidence + # x = x[x[:, 4].argsort(descending=True)] + + # Batched NMS + c = x[:, 5:6] * (0 if agnostic else max_wh) # classes + boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores + i = torch.ops.torchvision.nms(boxes, scores, iou_thres) + if i.shape[0] > max_det: # limit detections + i = i[:max_det] + if merge and (1 < n < 3E3): # Merge NMS (boxes merged using weighted mean) + # update boxes as boxes(i,4) = weights(i,n) * boxes(n,4) + iou = box_iou(boxes[i], boxes) > iou_thres # iou matrix + weights = iou * scores[None] # box weights + x[i, :4] = torch.mm(weights, x[:, :4]).float() / weights.sum(1, keepdim=True) # merged boxes + if redundant: + i = i[iou.sum(1) > 1] # require redundancy + + output[xi] = x[i] + if (time.time() - t) > time_limit: + break # time limit exceeded + + return output + + +def strip_optimizer(f='weights/best.pt', s=''): # from utils.general import *; strip_optimizer() + # Strip optimizer from 'f' to finalize training, optionally save as 's' + x = torch.load(f, map_location=torch.device('cpu')) + x['optimizer'] = None + x['training_results'] = None + x['epoch'] = -1 + #x['model'].half() # to FP16 + #for p in x['model'].parameters(): + # p.requires_grad = False + torch.save(x, s or f) + mb = os.path.getsize(s or f) / 1E6 # filesize + print('Optimizer stripped from %s,%s %.1fMB' % (f, (' saved as %s,' % s) if s else '', mb)) + + +def print_mutation(hyp, results, yaml_file='hyp_evolved.yaml', bucket=''): + # Print mutation 
results to evolve.txt (for use with train.py --evolve) + a = '%10s' * len(hyp) % tuple(hyp.keys()) # hyperparam keys + b = '%10.3g' * len(hyp) % tuple(hyp.values()) # hyperparam values + c = '%10.4g' * len(results) % results # results (P, R, mAP@0.5, mAP@0.5:0.95, val_losses x 3) + print('\n%s\n%s\nEvolved fitness: %s\n' % (a, b, c)) + + if bucket: + url = 'gs://%s/evolve.txt' % bucket + if gsutil_getsize(url) > (os.path.getsize('evolve.txt') if os.path.exists('evolve.txt') else 0): + os.system('gsutil cp %s .' % url) # download evolve.txt if larger than local + + with open('evolve.txt', 'a') as f: # append result + f.write(c + b + '\n') + x = np.unique(np.loadtxt('evolve.txt', ndmin=2), axis=0) # load unique rows + x = x[np.argsort(-fitness(x))] # sort + np.savetxt('evolve.txt', x, '%10.3g') # save sort by fitness + + # Save yaml + for i, k in enumerate(hyp.keys()): + hyp[k] = float(x[0, i + 7]) + with open(yaml_file, 'w') as f: + results = tuple(x[0, :7]) + c = '%10.4g' * len(results) % results # results (P, R, mAP@0.5, mAP@0.5:0.95, val_losses x 3) + f.write('# Hyperparameter Evolution Results\n# Generations: %g\n# Metrics: ' % len(x) + c + '\n\n') + yaml.dump(hyp, f, sort_keys=False) + + if bucket: + os.system('gsutil cp evolve.txt %s gs://%s' % (yaml_file, bucket)) # upload + + +def apply_classifier(x, model, img, im0): + # applies a second stage classifier to yolo outputs + im0 = [im0] if isinstance(im0, np.ndarray) else im0 + for i, d in enumerate(x): # per image + if d is not None and len(d): + d = d.clone() + + # Reshape and pad cutouts + b = xyxy2xywh(d[:, :4]) # boxes + b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # rectangle to square + b[:, 2:] = b[:, 2:] * 1.3 + 30 # pad + d[:, :4] = xywh2xyxy(b).long() + + # Rescale boxes from img_size to im0 size + scale_coords(img.shape[2:], d[:, :4], im0[i].shape) + + # Classes + pred_cls1 = d[:, 5].long() + ims = [] + for j, a in enumerate(d): # per item + cutout = im0[i][int(a[1]):int(a[3]), int(a[0]):int(a[2])] + im = cv2.resize(cutout, (224, 224)) # BGR + # cv2.imwrite('test%i.jpg' % j, cutout) + + im = im[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + im = np.ascontiguousarray(im, dtype=np.float32) # uint8 to float32 + im /= 255.0 # 0 - 255 to 0.0 - 1.0 + ims.append(im) + + pred_cls2 = model(torch.Tensor(ims).to(d.device)).argmax(1) # classifier prediction + x[i] = x[i][pred_cls1 == pred_cls2] # retain matching class detections + + return x + + +def increment_path(path, exist_ok=True, sep=''): + # Increment path, i.e. runs/exp --> runs/exp{sep}0, runs/exp{sep}1 etc. 
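+    # A rough usage sketch (illustrative only; assumes the caller creates the returned directory):
+    #   increment_path('runs/exp', exist_ok=False)  # -> 'runs/exp'  if no 'runs/exp' exists yet
+    #   increment_path('runs/exp', exist_ok=False)  # -> 'runs/exp2' once 'runs/exp' exists (numbering starts at 2)
+    #   increment_path('runs/exp', exist_ok=False)  # -> 'runs/exp3' once 'runs/exp2' also exists, and so on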
+ path = Path(path) # os-agnostic + if (path.exists() and exist_ok) or (not path.exists()): + return str(path) + else: + dirs = glob.glob(f"{path}{sep}*") # similar paths + matches = [re.search(rf"%s{sep}(\d+)" % path.stem, d) for d in dirs] + i = [int(m.groups()[0]) for m in matches if m] # indices + n = max(i) + 1 if i else 2 # increment number + return f"{path}{sep}{n}" # update path diff --git a/utils/google_utils.py b/utils/google_utils.py new file mode 100644 index 0000000..7037bf1 --- /dev/null +++ b/utils/google_utils.py @@ -0,0 +1,120 @@ +# Google utils: https://cloud.google.com/storage/docs/reference/libraries + +import os +import platform +import subprocess +import time +from pathlib import Path + +import torch + + +def gsutil_getsize(url=''): + # gs://bucket/file size https://cloud.google.com/storage/docs/gsutil/commands/du + s = subprocess.check_output('gsutil du %s' % url, shell=True).decode('utf-8') + return eval(s.split(' ')[0]) if len(s) else 0 # bytes + + +def attempt_download(weights): + # Attempt to download pretrained weights if not found locally + weights = weights.strip().replace("'", '') + file = Path(weights).name + + msg = weights + ' missing, try downloading from https://github.com/WongKinYiu/yolor/releases/' + models = ['yolor_p6.pt', 'yolor_w6.pt'] # available models + + if file in models and not os.path.isfile(weights): + + try: # GitHub + url = 'https://github.com/WongKinYiu/yolor/releases/download/v1.0/' + file + print('Downloading %s to %s...' % (url, weights)) + torch.hub.download_url_to_file(url, weights) + assert os.path.exists(weights) and os.path.getsize(weights) > 1E6 # check + except Exception as e: # GCP + print('ERROR: Download failure.') + print('') + + +def attempt_load(weights, map_location=None): + # Loads an ensemble of models weights=[a,b,c] or a single model weights=[a] or weights=a + model = Ensemble() + for w in weights if isinstance(weights, list) else [weights]: + attempt_download(w) + model.append(torch.load(w, map_location=map_location)['model'].float().fuse().eval()) # load FP32 model + + if len(model) == 1: + return model[-1] # return model + else: + print('Ensemble created with %s\n' % weights) + for k in ['names', 'stride']: + setattr(model, k, getattr(model[-1], k)) + return model # return ensemble + + +def gdrive_download(id='1n_oKgR81BJtqk75b00eAjdv03qVCQn2f', name='coco128.zip'): + # Downloads a file from Google Drive. from utils.google_utils import *; gdrive_download() + t = time.time() + + print('Downloading https://drive.google.com/uc?export=download&id=%s as %s... ' % (id, name), end='') + os.remove(name) if os.path.exists(name) else None # remove existing + os.remove('cookie') if os.path.exists('cookie') else None + + # Attempt file download + out = "NUL" if platform.system() == "Windows" else "/dev/null" + os.system('curl -c ./cookie -s -L "drive.google.com/uc?export=download&id=%s" > %s ' % (id, out)) + if os.path.exists('cookie'): # large file + s = 'curl -Lb ./cookie "drive.google.com/uc?export=download&confirm=%s&id=%s" -o %s' % (get_token(), id, name) + else: # small file + s = 'curl -s -L -o %s "drive.google.com/uc?export=download&id=%s"' % (name, id) + r = os.system(s) # execute, capture return + os.remove('cookie') if os.path.exists('cookie') else None + + # Error check + if r != 0: + os.remove(name) if os.path.exists(name) else None # remove partial + print('Download error ') # raise Exception('Download error') + return r + + # Unzip if archive + if name.endswith('.zip'): + print('unzipping... 
', end='') + os.system('unzip -q %s' % name) # unzip + os.remove(name) # remove zip to free space + + print('Done (%.1fs)' % (time.time() - t)) + return r + + +def get_token(cookie="./cookie"): + with open(cookie) as f: + for line in f: + if "download" in line: + return line.split()[-1] + return "" + +# def upload_blob(bucket_name, source_file_name, destination_blob_name): +# # Uploads a file to a bucket +# # https://cloud.google.com/storage/docs/uploading-objects#storage-upload-object-python +# +# storage_client = storage.Client() +# bucket = storage_client.get_bucket(bucket_name) +# blob = bucket.blob(destination_blob_name) +# +# blob.upload_from_filename(source_file_name) +# +# print('File {} uploaded to {}.'.format( +# source_file_name, +# destination_blob_name)) +# +# +# def download_blob(bucket_name, source_blob_name, destination_file_name): +# # Uploads a blob from a bucket +# storage_client = storage.Client() +# bucket = storage_client.get_bucket(bucket_name) +# blob = bucket.blob(source_blob_name) +# +# blob.download_to_filename(destination_file_name) +# +# print('Blob {} downloaded to {}.'.format( +# source_blob_name, +# destination_file_name)) diff --git a/utils/layers.py b/utils/layers.py new file mode 100644 index 0000000..bc1183a --- /dev/null +++ b/utils/layers.py @@ -0,0 +1,534 @@ +import torch.nn.functional as F + +from utils.general import * + +import torch +from torch import nn + +try: + from mish_cuda import MishCuda as Mish + +except: + class Mish(nn.Module): # https://github.com/digantamisra98/Mish + def forward(self, x): + return x * F.softplus(x).tanh() + +try: + from pytorch_wavelets import DWTForward, DWTInverse + + class DWT(nn.Module): + def __init__(self): + super(DWT, self).__init__() + self.xfm = DWTForward(J=1, wave='db1', mode='zero') + + def forward(self, x): + b,c,w,h = x.shape + yl, yh = self.xfm(x) + return torch.cat([yl/2., yh[0].view(b,-1,w//2,h//2)/2.+.5], 1) + +except: # using Reorg instead + class DWT(nn.Module): + def forward(self, x): + return torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1) + + +class Reorg(nn.Module): + def forward(self, x): + return torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1) + + +def make_divisible(v, divisor): + # Function ensures all layers have a channel number that is divisible by 8 + # https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py + return math.ceil(v / divisor) * divisor + + +class Flatten(nn.Module): + # Use after nn.AdaptiveAvgPool2d(1) to remove last 2 dimensions + def forward(self, x): + return x.view(x.size(0), -1) + + +class Concat(nn.Module): + # Concatenate a list of tensors along dimension + def __init__(self, dimension=1): + super(Concat, self).__init__() + self.d = dimension + + def forward(self, x): + return torch.cat(x, self.d) + + +class FeatureConcat(nn.Module): + def __init__(self, layers): + super(FeatureConcat, self).__init__() + self.layers = layers # layer indices + self.multiple = len(layers) > 1 # multiple layers flag + + def forward(self, x, outputs): + return torch.cat([outputs[i] for i in self.layers], 1) if self.multiple else outputs[self.layers[0]] + + +class FeatureConcat2(nn.Module): + def __init__(self, layers): + super(FeatureConcat2, self).__init__() + self.layers = layers # layer indices + self.multiple = len(layers) > 1 # multiple layers flag + + def forward(self, x, outputs): + return torch.cat([outputs[self.layers[0]], outputs[self.layers[1]].detach()], 
1) + + +class FeatureConcat3(nn.Module): + def __init__(self, layers): + super(FeatureConcat3, self).__init__() + self.layers = layers # layer indices + self.multiple = len(layers) > 1 # multiple layers flag + + def forward(self, x, outputs): + return torch.cat([outputs[self.layers[0]], outputs[self.layers[1]].detach(), outputs[self.layers[2]].detach()], 1) + + +class FeatureConcat_l(nn.Module): + def __init__(self, layers): + super(FeatureConcat_l, self).__init__() + self.layers = layers # layer indices + self.multiple = len(layers) > 1 # multiple layers flag + + def forward(self, x, outputs): + return torch.cat([outputs[i][:,:outputs[i].shape[1]//2,:,:] for i in self.layers], 1) if self.multiple else outputs[self.layers[0]][:,:outputs[self.layers[0]].shape[1]//2,:,:] + + +class WeightedFeatureFusion(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers, weight=False): + super(WeightedFeatureFusion, self).__init__() + self.layers = layers # layer indices + self.weight = weight # apply weights boolean + self.n = len(layers) + 1 # number of layers + if weight: + self.w = nn.Parameter(torch.zeros(self.n), requires_grad=True) # layer weights + + def forward(self, x, outputs): + # Weights + if self.weight: + w = torch.sigmoid(self.w) * (2 / self.n) # sigmoid weights (0-1) + x = x * w[0] + + # Fusion + nx = x.shape[1] # input channels + for i in range(self.n - 1): + a = outputs[self.layers[i]] * w[i + 1] if self.weight else outputs[self.layers[i]] # feature to add + na = a.shape[1] # feature channels + + # Adjust channels + if nx == na: # same shape + x = x + a + elif nx > na: # slice input + x[:, :na] = x[:, :na] + a # or a = nn.ZeroPad2d((0, 0, 0, 0, 0, dc))(a); x = x + a + else: # slice feature + x = x + a[:, :nx] + + return x + + +class MixConv2d(nn.Module): # MixConv: Mixed Depthwise Convolutional Kernels https://arxiv.org/abs/1907.09595 + def __init__(self, in_ch, out_ch, k=(3, 5, 7), stride=1, dilation=1, bias=True, method='equal_params'): + super(MixConv2d, self).__init__() + + groups = len(k) + if method == 'equal_ch': # equal channels per group + i = torch.linspace(0, groups - 1E-6, out_ch).floor() # out_ch indices + ch = [(i == g).sum() for g in range(groups)] + else: # 'equal_params': equal parameter count per group + b = [out_ch] + [0] * groups + a = np.eye(groups + 1, groups, k=-1) + a -= np.roll(a, 1, axis=1) + a *= np.array(k) ** 2 + a[0] = 1 + ch = np.linalg.lstsq(a, b, rcond=None)[0].round().astype(int) # solve for equal weight indices, ax = b + + self.m = nn.ModuleList([nn.Conv2d(in_channels=in_ch, + out_channels=ch[g], + kernel_size=k[g], + stride=stride, + padding=k[g] // 2, # 'same' pad + dilation=dilation, + bias=bias) for g in range(groups)]) + + def forward(self, x): + return torch.cat([m(x) for m in self.m], 1) + + +# Activation functions below ------------------------------------------------------------------------------------------- +class SwishImplementation(torch.autograd.Function): + @staticmethod + def forward(ctx, x): + ctx.save_for_backward(x) + return x * torch.sigmoid(x) + + @staticmethod + def backward(ctx, grad_output): + x = ctx.saved_tensors[0] + sx = torch.sigmoid(x) # sigmoid(ctx) + return grad_output * (sx * (1 + x * (1 - sx))) + + +class MishImplementation(torch.autograd.Function): + @staticmethod + def forward(ctx, x): + ctx.save_for_backward(x) + return x.mul(torch.tanh(F.softplus(x))) # x * tanh(ln(1 + exp(x))) + + @staticmethod + def backward(ctx, grad_output): + x = ctx.saved_tensors[0] + sx = 
torch.sigmoid(x) + fx = F.softplus(x).tanh() + return grad_output * (fx + x * sx * (1 - fx * fx)) + + +class MemoryEfficientSwish(nn.Module): + def forward(self, x): + return SwishImplementation.apply(x) + + +class MemoryEfficientMish(nn.Module): + def forward(self, x): + return MishImplementation.apply(x) + + +class Swish(nn.Module): + def forward(self, x): + return x * torch.sigmoid(x) + + +class HardSwish(nn.Module): # https://arxiv.org/pdf/1905.02244.pdf + def forward(self, x): + return x * F.hardtanh(x + 3, 0., 6., True) / 6. + + +class DeformConv2d(nn.Module): + def __init__(self, inc, outc, kernel_size=3, padding=1, stride=1, bias=None, modulation=False): + """ + Args: + modulation (bool, optional): If True, Modulated Defomable Convolution (Deformable ConvNets v2). + """ + super(DeformConv2d, self).__init__() + self.kernel_size = kernel_size + self.padding = padding + self.stride = stride + self.zero_padding = nn.ZeroPad2d(padding) + self.conv = nn.Conv2d(inc, outc, kernel_size=kernel_size, stride=kernel_size, bias=bias) + + self.p_conv = nn.Conv2d(inc, 2*kernel_size*kernel_size, kernel_size=3, padding=1, stride=stride) + nn.init.constant_(self.p_conv.weight, 0) + self.p_conv.register_backward_hook(self._set_lr) + + self.modulation = modulation + if modulation: + self.m_conv = nn.Conv2d(inc, kernel_size*kernel_size, kernel_size=3, padding=1, stride=stride) + nn.init.constant_(self.m_conv.weight, 0) + self.m_conv.register_backward_hook(self._set_lr) + + @staticmethod + def _set_lr(module, grad_input, grad_output): + grad_input = (grad_input[i] * 0.1 for i in range(len(grad_input))) + grad_output = (grad_output[i] * 0.1 for i in range(len(grad_output))) + + def forward(self, x): + offset = self.p_conv(x) + if self.modulation: + m = torch.sigmoid(self.m_conv(x)) + + dtype = offset.data.type() + ks = self.kernel_size + N = offset.size(1) // 2 + + if self.padding: + x = self.zero_padding(x) + + # (b, 2N, h, w) + p = self._get_p(offset, dtype) + + # (b, h, w, 2N) + p = p.contiguous().permute(0, 2, 3, 1) + q_lt = p.detach().floor() + q_rb = q_lt + 1 + + q_lt = torch.cat([torch.clamp(q_lt[..., :N], 0, x.size(2)-1), torch.clamp(q_lt[..., N:], 0, x.size(3)-1)], dim=-1).long() + q_rb = torch.cat([torch.clamp(q_rb[..., :N], 0, x.size(2)-1), torch.clamp(q_rb[..., N:], 0, x.size(3)-1)], dim=-1).long() + q_lb = torch.cat([q_lt[..., :N], q_rb[..., N:]], dim=-1) + q_rt = torch.cat([q_rb[..., :N], q_lt[..., N:]], dim=-1) + + # clip p + p = torch.cat([torch.clamp(p[..., :N], 0, x.size(2)-1), torch.clamp(p[..., N:], 0, x.size(3)-1)], dim=-1) + + # bilinear kernel (b, h, w, N) + g_lt = (1 + (q_lt[..., :N].type_as(p) - p[..., :N])) * (1 + (q_lt[..., N:].type_as(p) - p[..., N:])) + g_rb = (1 - (q_rb[..., :N].type_as(p) - p[..., :N])) * (1 - (q_rb[..., N:].type_as(p) - p[..., N:])) + g_lb = (1 + (q_lb[..., :N].type_as(p) - p[..., :N])) * (1 - (q_lb[..., N:].type_as(p) - p[..., N:])) + g_rt = (1 - (q_rt[..., :N].type_as(p) - p[..., :N])) * (1 + (q_rt[..., N:].type_as(p) - p[..., N:])) + + # (b, c, h, w, N) + x_q_lt = self._get_x_q(x, q_lt, N) + x_q_rb = self._get_x_q(x, q_rb, N) + x_q_lb = self._get_x_q(x, q_lb, N) + x_q_rt = self._get_x_q(x, q_rt, N) + + # (b, c, h, w, N) + x_offset = g_lt.unsqueeze(dim=1) * x_q_lt + \ + g_rb.unsqueeze(dim=1) * x_q_rb + \ + g_lb.unsqueeze(dim=1) * x_q_lb + \ + g_rt.unsqueeze(dim=1) * x_q_rt + + # modulation + if self.modulation: + m = m.contiguous().permute(0, 2, 3, 1) + m = m.unsqueeze(dim=1) + m = torch.cat([m for _ in range(x_offset.size(1))], dim=1) + x_offset *= m + + 
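+        # At this point x_offset holds the bilinearly sampled values at the N = ks*ks learned
+        # offset locations, shaped (b, c, h, w, N). _reshape_x_offset tiles these samples into a
+        # (b, c, h*ks, w*ks) map so that the stride-ks convolution below aggregates each group of
+        # ks*ks samples into one output pixel, which is what makes this behave like a deformable conv.
+        # Rough shape walk-through (assuming ks=3, stride=1): input (1, 64, 32, 32)
+        # -> x_offset (1, 64, 32, 32, 9) -> reshaped (1, 64, 96, 96) -> self.conv output (1, outc, 32, 32)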
x_offset = self._reshape_x_offset(x_offset, ks) + out = self.conv(x_offset) + + return out + + def _get_p_n(self, N, dtype): + p_n_x, p_n_y = torch.meshgrid( + torch.arange(-(self.kernel_size-1)//2, (self.kernel_size-1)//2+1), + torch.arange(-(self.kernel_size-1)//2, (self.kernel_size-1)//2+1)) + # (2N, 1) + p_n = torch.cat([torch.flatten(p_n_x), torch.flatten(p_n_y)], 0) + p_n = p_n.view(1, 2*N, 1, 1).type(dtype) + + return p_n + + def _get_p_0(self, h, w, N, dtype): + p_0_x, p_0_y = torch.meshgrid( + torch.arange(1, h*self.stride+1, self.stride), + torch.arange(1, w*self.stride+1, self.stride)) + p_0_x = torch.flatten(p_0_x).view(1, 1, h, w).repeat(1, N, 1, 1) + p_0_y = torch.flatten(p_0_y).view(1, 1, h, w).repeat(1, N, 1, 1) + p_0 = torch.cat([p_0_x, p_0_y], 1).type(dtype) + + return p_0 + + def _get_p(self, offset, dtype): + N, h, w = offset.size(1)//2, offset.size(2), offset.size(3) + + # (1, 2N, 1, 1) + p_n = self._get_p_n(N, dtype) + # (1, 2N, h, w) + p_0 = self._get_p_0(h, w, N, dtype) + p = p_0 + p_n + offset + return p + + def _get_x_q(self, x, q, N): + b, h, w, _ = q.size() + padded_w = x.size(3) + c = x.size(1) + # (b, c, h*w) + x = x.contiguous().view(b, c, -1) + + # (b, h, w, N) + index = q[..., :N]*padded_w + q[..., N:] # offset_x*w + offset_y + # (b, c, h*w*N) + index = index.contiguous().unsqueeze(dim=1).expand(-1, c, -1, -1, -1).contiguous().view(b, c, -1) + + x_offset = x.gather(dim=-1, index=index).contiguous().view(b, c, h, w, N) + + return x_offset + + @staticmethod + def _reshape_x_offset(x_offset, ks): + b, c, h, w, N = x_offset.size() + x_offset = torch.cat([x_offset[..., s:s+ks].contiguous().view(b, c, h, w*ks) for s in range(0, N, ks)], dim=-1) + x_offset = x_offset.contiguous().view(b, c, h*ks, w*ks) + + return x_offset + + +class GAP(nn.Module): + def __init__(self): + super(GAP, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2d(1) + def forward(self, x): + #b, c, _, _ = x.size() + return self.avg_pool(x)#.view(b, c) + + +class Silence(nn.Module): + def __init__(self): + super(Silence, self).__init__() + def forward(self, x): + return x + + +class ScaleChannel(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(ScaleChannel, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]] + return x.expand_as(a) * a + + +class ShiftChannel(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(ShiftChannel, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]] + return a.expand_as(x) + x + + +class ShiftChannel2D(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(ShiftChannel2D, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]].view(1,-1,1,1) + return a.expand_as(x) + x + + +class ControlChannel(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(ControlChannel, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]] + return a.expand_as(x) * x + + +class ControlChannel2D(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(ControlChannel2D, self).__init__() + self.layers = layers 
# layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]].view(1,-1,1,1) + return a.expand_as(x) * x + + +class AlternateChannel(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(AlternateChannel, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]] + return torch.cat([a.expand_as(x), x], dim=1) + + +class AlternateChannel2D(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(AlternateChannel2D, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]].view(1,-1,1,1) + return torch.cat([a.expand_as(x), x], dim=1) + + +class SelectChannel(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(SelectChannel, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]] + return a.sigmoid().expand_as(x) * x + + +class SelectChannel2D(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(SelectChannel2D, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]].view(1,-1,1,1) + return a.sigmoid().expand_as(x) * x + + +class ScaleSpatial(nn.Module): # weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, layers): + super(ScaleSpatial, self).__init__() + self.layers = layers # layer indices + + def forward(self, x, outputs): + a = outputs[self.layers[0]] + return x * a + + +class ImplicitA(nn.Module): + def __init__(self, channel): + super(ImplicitA, self).__init__() + self.channel = channel + self.implicit = nn.Parameter(torch.zeros(1, channel, 1, 1)) + nn.init.normal_(self.implicit, std=.02) + + def forward(self): + return self.implicit + + +class ImplicitC(nn.Module): + def __init__(self, channel): + super(ImplicitC, self).__init__() + self.channel = channel + self.implicit = nn.Parameter(torch.zeros(1, channel, 1, 1)) + nn.init.normal_(self.implicit, std=.02) + + def forward(self): + return self.implicit + + +class ImplicitM(nn.Module): + def __init__(self, channel): + super(ImplicitM, self).__init__() + self.channel = channel + self.implicit = nn.Parameter(torch.ones(1, channel, 1, 1)) + nn.init.normal_(self.implicit, mean=1., std=.02) + + def forward(self): + return self.implicit + + + +class Implicit2DA(nn.Module): + def __init__(self, atom, channel): + super(Implicit2DA, self).__init__() + self.channel = channel + self.implicit = nn.Parameter(torch.zeros(1, atom, channel, 1)) + nn.init.normal_(self.implicit, std=.02) + + def forward(self): + return self.implicit + + +class Implicit2DC(nn.Module): + def __init__(self, atom, channel): + super(Implicit2DC, self).__init__() + self.channel = channel + self.implicit = nn.Parameter(torch.zeros(1, atom, channel, 1)) + nn.init.normal_(self.implicit, std=.02) + + def forward(self): + return self.implicit + + +class Implicit2DM(nn.Module): + def __init__(self, atom, channel): + super(Implicit2DM, self).__init__() + self.channel = channel + self.implicit = nn.Parameter(torch.ones(1, atom, channel, 1)) + nn.init.normal_(self.implicit, mean=1., std=.02) + + def forward(self): + return self.implicit + + + \ No newline at end of file diff --git a/utils/loss.py b/utils/loss.py new file mode 
100644 index 0000000..b8a0fc6 --- /dev/null +++ b/utils/loss.py @@ -0,0 +1,173 @@ +# Loss functions + +import torch +import torch.nn as nn + +from utils.general import bbox_iou +from utils.torch_utils import is_parallel + + +def smooth_BCE(eps=0.1): # https://github.com/ultralytics/yolov3/issues/238#issuecomment-598028441 + # return positive, negative label smoothing BCE targets + return 1.0 - 0.5 * eps, 0.5 * eps + + +class BCEBlurWithLogitsLoss(nn.Module): + # BCEwithLogitLoss() with reduced missing label effects. + def __init__(self, alpha=0.05): + super(BCEBlurWithLogitsLoss, self).__init__() + self.loss_fcn = nn.BCEWithLogitsLoss(reduction='none') # must be nn.BCEWithLogitsLoss() + self.alpha = alpha + + def forward(self, pred, true): + loss = self.loss_fcn(pred, true) + pred = torch.sigmoid(pred) # prob from logits + dx = pred - true # reduce only missing label effects + # dx = (pred - true).abs() # reduce missing label and false label effects + alpha_factor = 1 - torch.exp((dx - 1) / (self.alpha + 1e-4)) + loss *= alpha_factor + return loss.mean() + + +class FocalLoss(nn.Module): + # Wraps focal loss around existing loss_fcn(), i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5) + def __init__(self, loss_fcn, gamma=1.5, alpha=0.25): + super(FocalLoss, self).__init__() + self.loss_fcn = loss_fcn # must be nn.BCEWithLogitsLoss() + self.gamma = gamma + self.alpha = alpha + self.reduction = loss_fcn.reduction + self.loss_fcn.reduction = 'none' # required to apply FL to each element + + def forward(self, pred, true): + loss = self.loss_fcn(pred, true) + # p_t = torch.exp(-loss) + # loss *= self.alpha * (1.000001 - p_t) ** self.gamma # non-zero power for gradient stability + + # TF implementation https://github.com/tensorflow/addons/blob/v0.7.1/tensorflow_addons/losses/focal_loss.py + pred_prob = torch.sigmoid(pred) # prob from logits + p_t = true * pred_prob + (1 - true) * (1 - pred_prob) + alpha_factor = true * self.alpha + (1 - true) * (1 - self.alpha) + modulating_factor = (1.0 - p_t) ** self.gamma + loss *= alpha_factor * modulating_factor + + if self.reduction == 'mean': + return loss.mean() + elif self.reduction == 'sum': + return loss.sum() + else: # 'none' + return loss + + +def compute_loss(p, targets, model): # predictions, targets, model + device = targets.device + #print(device) + lcls, lbox, lobj = torch.zeros(1, device=device), torch.zeros(1, device=device), torch.zeros(1, device=device) + tcls, tbox, indices, anchors = build_targets(p, targets, model) # targets + h = model.hyp # hyperparameters + + # Define criteria + BCEcls = nn.BCEWithLogitsLoss(pos_weight=torch.Tensor([h['cls_pw']])).to(device) + BCEobj = nn.BCEWithLogitsLoss(pos_weight=torch.Tensor([h['obj_pw']])).to(device) + + # Class label smoothing https://arxiv.org/pdf/1902.04103.pdf eqn 3 + cp, cn = smooth_BCE(eps=0.0) + + # Focal loss + g = h['fl_gamma'] # focal loss gamma + if g > 0: + BCEcls, BCEobj = FocalLoss(BCEcls, g), FocalLoss(BCEobj, g) + + # Losses + nt = 0 # number of targets + no = len(p) # number of outputs + balance = [4.0, 1.0, 0.4] if no == 3 else [4.0, 1.0, 0.4, 0.1] # P3-5 or P3-6 + balance = [4.0, 1.0, 0.5, 0.4, 0.1] if no == 5 else balance + for i, pi in enumerate(p): # layer index, layer predictions + b, a, gj, gi = indices[i] # image, anchor, gridy, gridx + tobj = torch.zeros_like(pi[..., 0], device=device) # target obj + + n = b.shape[0] # number of targets + if n: + nt += n # cumulative targets + ps = pi[b, a, gj, gi] # prediction subset corresponding to targets + + # Regression 
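+                # The decode below uses the YOLOv4/v5-style box parameterization:
+                #   xy offset = sigmoid(p) * 2 - 0.5           -> in (-0.5, 1.5) around the grid cell
+                #   wh        = (sigmoid(p) * 2) ** 2 * anchor -> in (0, 4 * anchor)
+                # e.g. a raw prediction of 0 gives sigmoid(0) = 0.5, i.e. an xy offset of 0.5 (cell centre)
+                # and wh equal to the anchor size; the box loss is then the mean of (1 - CIoU) between
+                # the decoded boxes and their assigned targets.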
+ pxy = ps[:, :2].sigmoid() * 2. - 0.5 + pwh = (ps[:, 2:4].sigmoid() * 2) ** 2 * anchors[i] + pbox = torch.cat((pxy, pwh), 1).to(device) # predicted box + iou = bbox_iou(pbox.T, tbox[i], x1y1x2y2=False, CIoU=True) # iou(prediction, target) + lbox += (1.0 - iou).mean() # iou loss + + # Objectness + tobj[b, a, gj, gi] = (1.0 - model.gr) + model.gr * iou.detach().clamp(0).type(tobj.dtype) # iou ratio + + # Classification + if model.nc > 1: # cls loss (only if multiple classes) + t = torch.full_like(ps[:, 5:], cn, device=device) # targets + t[range(n), tcls[i]] = cp + lcls += BCEcls(ps[:, 5:], t) # BCE + + # Append targets to text file + # with open('targets.txt', 'a') as file: + # [file.write('%11.5g ' * 4 % tuple(x) + '\n') for x in torch.cat((txy[i], twh[i]), 1)] + + lobj += BCEobj(pi[..., 4], tobj) * balance[i] # obj loss + + s = 3 / no # output count scaling + lbox *= h['box'] * s + lobj *= h['obj'] * s * (1.4 if no >= 4 else 1.) + lcls *= h['cls'] * s + bs = tobj.shape[0] # batch size + + loss = lbox + lobj + lcls + return loss * bs, torch.cat((lbox, lobj, lcls, loss)).detach() + + +def build_targets(p, targets, model): + nt = targets.shape[0] # number of anchors, targets + tcls, tbox, indices, anch = [], [], [], [] + gain = torch.ones(6, device=targets.device) # normalized to gridspace gain + off = torch.tensor([[1, 0], [0, 1], [-1, 0], [0, -1]], device=targets.device).float() # overlap offsets + + g = 0.5 # offset + multi_gpu = is_parallel(model) + for i, jj in enumerate(model.module.yolo_layers if multi_gpu else model.yolo_layers): + # get number of grid points and anchor vec for this yolo layer + anchors = model.module.module_list[jj].anchor_vec if multi_gpu else model.module_list[jj].anchor_vec + gain[2:] = torch.tensor(p[i].shape)[[3, 2, 3, 2]] # xyxy gain + + # Match targets to anchors + a, t, offsets = [], targets * gain, 0 + if nt: + na = anchors.shape[0] # number of anchors + at = torch.arange(na).view(na, 1).repeat(1, nt) # anchor tensor, same as .repeat_interleave(nt) + r = t[None, :, 4:6] / anchors[:, None] # wh ratio + j = torch.max(r, 1. / r).max(2)[0] < model.hyp['anchor_t'] # compare + # j = wh_iou(anchors, t[:, 4:6]) > model.hyp['iou_t'] # iou(3,n) = wh_iou(anchors(3,2), gwh(n,2)) + a, t = at[j], t.repeat(na, 1, 1)[j] # filter + + # overlaps + gxy = t[:, 2:4] # grid xy + z = torch.zeros_like(gxy) + j, k = ((gxy % 1. < g) & (gxy > 1.)).T + l, m = ((gxy % 1. 
> (1 - g)) & (gxy < (gain[[2, 3]] - 1.))).T + a, t = torch.cat((a, a[j], a[k], a[l], a[m]), 0), torch.cat((t, t[j], t[k], t[l], t[m]), 0) + offsets = torch.cat((z, z[j] + off[0], z[k] + off[1], z[l] + off[2], z[m] + off[3]), 0) * g + + # Define + b, c = t[:, :2].long().T # image, class + gxy = t[:, 2:4] # grid xy + gwh = t[:, 4:6] # grid wh + gij = (gxy - offsets).long() + gi, gj = gij.T # grid xy indices + + # Append + #indices.append((b, a, gj, gi)) # image, anchor, grid indices + indices.append((b, a, gj.clamp_(0, gain[3] - 1), gi.clamp_(0, gain[2] - 1))) # image, anchor, grid indices + tbox.append(torch.cat((gxy - gij, gwh), 1)) # box + anch.append(anchors[a]) # anchors + tcls.append(c) # class + + return tcls, tbox, indices, anch + diff --git a/utils/metrics.py b/utils/metrics.py new file mode 100644 index 0000000..ede487a --- /dev/null +++ b/utils/metrics.py @@ -0,0 +1,140 @@ +# Model validation metrics + +import matplotlib.pyplot as plt +import numpy as np + + +def fitness(x): + # Model fitness as a weighted combination of metrics + w = [0.0, 0.0, 0.1, 0.9] # weights for [P, R, mAP@0.5, mAP@0.5:0.95] + return (x[:, :4] * w).sum(1) + + +def fitness_p(x): + # Model fitness as a weighted combination of metrics + w = [1.0, 0.0, 0.0, 0.0] # weights for [P, R, mAP@0.5, mAP@0.5:0.95] + return (x[:, :4] * w).sum(1) + + +def fitness_r(x): + # Model fitness as a weighted combination of metrics + w = [0.0, 1.0, 0.0, 0.0] # weights for [P, R, mAP@0.5, mAP@0.5:0.95] + return (x[:, :4] * w).sum(1) + + +def fitness_ap50(x): + # Model fitness as a weighted combination of metrics + w = [0.0, 0.0, 1.0, 0.0] # weights for [P, R, mAP@0.5, mAP@0.5:0.95] + return (x[:, :4] * w).sum(1) + + +def fitness_ap(x): + # Model fitness as a weighted combination of metrics + w = [0.0, 0.0, 0.0, 1.0] # weights for [P, R, mAP@0.5, mAP@0.5:0.95] + return (x[:, :4] * w).sum(1) + + +def fitness_f(x): + # Model fitness as a weighted combination of metrics + #w = [0.0, 0.0, 0.0, 1.0] # weights for [P, R, mAP@0.5, mAP@0.5:0.95] + return ((x[:, 0]*x[:, 1])/(x[:, 0]+x[:, 1])) + + +def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, fname='precision-recall_curve.png'): + """ Compute the average precision, given the recall and precision curves. + Source: https://github.com/rafaelpadilla/Object-Detection-Metrics. + # Arguments + tp: True positives (nparray, nx1 or nx10). + conf: Objectness value from 0-1 (nparray). + pred_cls: Predicted object classes (nparray). + target_cls: True object classes (nparray). + plot: Plot precision-recall curve at mAP@0.5 + fname: Plot filename + # Returns + The average precision as computed in py-faster-rcnn. + """ + + # Sort by objectness + i = np.argsort(-conf) + tp, conf, pred_cls = tp[i], conf[i], pred_cls[i] + + # Find unique classes + unique_classes = np.unique(target_cls) + + # Create Precision-Recall curve and compute AP for each class + px, py = np.linspace(0, 1, 1000), [] # for plotting + pr_score = 0.1 # score to evaluate P and R https://github.com/ultralytics/yolov3/issues/898 + s = [unique_classes.shape[0], tp.shape[1]] # number class, number iou thresholds (i.e. 
10 for mAP0.5...0.95) + ap, p, r = np.zeros(s), np.zeros(s), np.zeros(s) + for ci, c in enumerate(unique_classes): + i = pred_cls == c + n_l = (target_cls == c).sum() # number of labels + n_p = i.sum() # number of predictions + + if n_p == 0 or n_l == 0: + continue + else: + # Accumulate FPs and TPs + fpc = (1 - tp[i]).cumsum(0) + tpc = tp[i].cumsum(0) + + # Recall + recall = tpc / (n_l + 1e-16) # recall curve + r[ci] = np.interp(-pr_score, -conf[i], recall[:, 0]) # r at pr_score, negative x, xp because xp decreases + + # Precision + precision = tpc / (tpc + fpc) # precision curve + p[ci] = np.interp(-pr_score, -conf[i], precision[:, 0]) # p at pr_score + + # AP from recall-precision curve + for j in range(tp.shape[1]): + ap[ci, j], mpre, mrec = compute_ap(recall[:, j], precision[:, j]) + if j == 0: + py.append(np.interp(px, mrec, mpre)) # precision at mAP@0.5 + + # Compute F1 score (harmonic mean of precision and recall) + f1 = 2 * p * r / (p + r + 1e-16) + + if plot: + py = np.stack(py, axis=1) + fig, ax = plt.subplots(1, 1, figsize=(5, 5)) + ax.plot(px, py, linewidth=0.5, color='grey') # plot(recall, precision) + ax.plot(px, py.mean(1), linewidth=2, color='blue', label='all classes %.3f mAP@0.5' % ap[:, 0].mean()) + ax.set_xlabel('Recall') + ax.set_ylabel('Precision') + ax.set_xlim(0, 1) + ax.set_ylim(0, 1) + plt.legend() + fig.tight_layout() + fig.savefig(fname, dpi=200) + + return p, r, ap, f1, unique_classes.astype('int32') + + +def compute_ap(recall, precision): + """ Compute the average precision, given the recall and precision curves. + Source: https://github.com/rbgirshick/py-faster-rcnn. + # Arguments + recall: The recall curve (list). + precision: The precision curve (list). + # Returns + The average precision as computed in py-faster-rcnn. 
+ """ + + # Append sentinel values to beginning and end + mrec = recall # np.concatenate(([0.], recall, [recall[-1] + 1E-3])) + mpre = precision # np.concatenate(([0.], precision, [0.])) + + # Compute the precision envelope + mpre = np.flip(np.maximum.accumulate(np.flip(mpre))) + + # Integrate area under curve + method = 'interp' # methods: 'continuous', 'interp' + if method == 'interp': + x = np.linspace(0, 1, 101) # 101-point interp (COCO) + ap = np.trapz(np.interp(x, mrec, mpre), x) # integrate + else: # 'continuous' + i = np.where(mrec[1:] != mrec[:-1])[0] # points where x axis (recall) changes + ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) # area under curve + + return ap, mpre, mrec diff --git a/utils/parse_config.py b/utils/parse_config.py new file mode 100644 index 0000000..b09e948 --- /dev/null +++ b/utils/parse_config.py @@ -0,0 +1,71 @@ +import os + +import numpy as np + + +def parse_model_cfg(path): + # Parse the yolo *.cfg file and return module definitions path may be 'cfg/yolov3.cfg', 'yolov3.cfg', or 'yolov3' + if not path.endswith('.cfg'): # add .cfg suffix if omitted + path += '.cfg' + if not os.path.exists(path) and os.path.exists('cfg' + os.sep + path): # add cfg/ prefix if omitted + path = 'cfg' + os.sep + path + + with open(path, 'r') as f: + lines = f.read().split('\n') + lines = [x for x in lines if x and not x.startswith('#')] + lines = [x.rstrip().lstrip() for x in lines] # get rid of fringe whitespaces + mdefs = [] # module definitions + for line in lines: + if line.startswith('['): # This marks the start of a new block + mdefs.append({}) + mdefs[-1]['type'] = line[1:-1].rstrip() + if mdefs[-1]['type'] == 'convolutional': + mdefs[-1]['batch_normalize'] = 0 # pre-populate with zeros (may be overwritten later) + + else: + key, val = line.split("=") + key = key.rstrip() + + if key == 'anchors': # return nparray + mdefs[-1][key] = np.array([float(x) for x in val.split(',')]).reshape((-1, 2)) # np anchors + elif (key in ['from', 'layers', 'mask']) or (key == 'size' and ',' in val): # return array + mdefs[-1][key] = [int(x) for x in val.split(',')] + else: + val = val.strip() + if val.isnumeric(): # return int or float + mdefs[-1][key] = int(val) if (int(val) - float(val)) == 0 else float(val) + else: + mdefs[-1][key] = val # return string + + # Check all fields are supported + supported = ['type', 'batch_normalize', 'filters', 'size', 'stride', 'pad', 'activation', 'layers', 'groups', + 'from', 'mask', 'anchors', 'classes', 'num', 'jitter', 'ignore_thresh', 'truth_thresh', 'random', + 'stride_x', 'stride_y', 'weights_type', 'weights_normalization', 'scale_x_y', 'beta_nms', 'nms_kind', + 'iou_loss', 'iou_normalizer', 'cls_normalizer', 'iou_thresh', 'atoms', 'na', 'nc'] + + f = [] # fields + for x in mdefs[1:]: + [f.append(k) for k in x if k not in f] + u = [x for x in f if x not in supported] # unsupported fields + assert not any(u), "Unsupported fields %s in %s. 
See https://github.com/ultralytics/yolov3/issues/631" % (u, path) + + return mdefs + + +def parse_data_cfg(path): + # Parses the data configuration file + if not os.path.exists(path) and os.path.exists('data' + os.sep + path): # add data/ prefix if omitted + path = 'data' + os.sep + path + + with open(path, 'r') as f: + lines = f.readlines() + + options = dict() + for line in lines: + line = line.strip() + if line == '' or line.startswith('#'): + continue + key, val = line.split('=') + options[key.strip()] = val.strip() + + return options diff --git a/utils/plots.py b/utils/plots.py new file mode 100644 index 0000000..3e50838 --- /dev/null +++ b/utils/plots.py @@ -0,0 +1,380 @@ +# Plotting utils + +import glob +import math +import os +import random +from copy import copy +from pathlib import Path + +import cv2 +import matplotlib +import matplotlib.pyplot as plt +import numpy as np +import torch +import yaml +from PIL import Image +from scipy.signal import butter, filtfilt + +from utils.general import xywh2xyxy, xyxy2xywh +from utils.metrics import fitness + +# Settings +matplotlib.use('Agg') # for writing to files only + + +def color_list(): + # Return first 10 plt colors as (r,g,b) https://stackoverflow.com/questions/51350872/python-from-color-name-to-rgb + def hex2rgb(h): + return tuple(int(h[1 + i:1 + i + 2], 16) for i in (0, 2, 4)) + + return [hex2rgb(h) for h in plt.rcParams['axes.prop_cycle'].by_key()['color']] + + +def hist2d(x, y, n=100): + # 2d histogram used in labels.png and evolve.png + xedges, yedges = np.linspace(x.min(), x.max(), n), np.linspace(y.min(), y.max(), n) + hist, xedges, yedges = np.histogram2d(x, y, (xedges, yedges)) + xidx = np.clip(np.digitize(x, xedges) - 1, 0, hist.shape[0] - 1) + yidx = np.clip(np.digitize(y, yedges) - 1, 0, hist.shape[1] - 1) + return np.log(hist[xidx, yidx]) + + +def butter_lowpass_filtfilt(data, cutoff=1500, fs=50000, order=5): + # https://stackoverflow.com/questions/28536191/how-to-filter-smooth-with-scipy-numpy + def butter_lowpass(cutoff, fs, order): + nyq = 0.5 * fs + normal_cutoff = cutoff / nyq + return butter(order, normal_cutoff, btype='low', analog=False) + + b, a = butter_lowpass(cutoff, fs, order=order) + return filtfilt(b, a, data) # forward-backward filter + + +def plot_one_box(x, img, color=None, label=None, line_thickness=None): + # Plots one bounding box on image img + tl = line_thickness or round(0.002 * (img.shape[0] + img.shape[1]) / 2) + 1 # line/font thickness + color = color or [random.randint(0, 255) for _ in range(3)] + c1, c2 = (int(x[0]), int(x[1])), (int(x[2]), int(x[3])) + cv2.rectangle(img, c1, c2, color, thickness=tl, lineType=cv2.LINE_AA) + if label: + tf = max(tl - 1, 1) # font thickness + t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0] + c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3 + cv2.rectangle(img, c1, c2, color, -1, cv2.LINE_AA) # filled + cv2.putText(img, label, (c1[0], c1[1] - 2), 0, tl / 3, [225, 255, 255], thickness=tf, lineType=cv2.LINE_AA) + + +def plot_wh_methods(): # from utils.general import *; plot_wh_methods() + # Compares the two methods for width-height anchor multiplication + # https://github.com/ultralytics/yolov3/issues/168 + x = np.arange(-4.0, 4.0, .1) + ya = np.exp(x) + yb = torch.sigmoid(torch.from_numpy(x)).numpy() * 2 + + fig = plt.figure(figsize=(6, 3), dpi=150) + plt.plot(x, ya, '.-', label='YOLO') + plt.plot(x, yb ** 2, '.-', label='YOLO ^2') + plt.plot(x, yb ** 1.6, '.-', label='YOLO ^1.6') + plt.xlim(left=-4, right=4) + plt.ylim(bottom=0, top=6) + 
plt.xlabel('input') + plt.ylabel('output') + plt.grid() + plt.legend() + fig.tight_layout() + fig.savefig('comparison.png', dpi=200) + + +def output_to_target(output, width, height): + # Convert model output to target format [batch_id, class_id, x, y, w, h, conf] + if isinstance(output, torch.Tensor): + output = output.cpu().numpy() + + targets = [] + for i, o in enumerate(output): + if o is not None: + for pred in o: + box = pred[:4] + w = (box[2] - box[0]) / width + h = (box[3] - box[1]) / height + x = box[0] / width + w / 2 + y = box[1] / height + h / 2 + conf = pred[4] + cls = int(pred[5]) + + targets.append([i, cls, x, y, w, h, conf]) + + return np.array(targets) + + +def plot_images(images, targets, paths=None, fname='images.jpg', names=None, max_size=640, max_subplots=16): + # Plot image grid with labels + + if isinstance(images, torch.Tensor): + images = images.cpu().float().numpy() + if isinstance(targets, torch.Tensor): + targets = targets.cpu().numpy() + + # un-normalise + if np.max(images[0]) <= 1: + images *= 255 + + tl = 3 # line thickness + tf = max(tl - 1, 1) # font thickness + bs, _, h, w = images.shape # batch size, _, height, width + bs = min(bs, max_subplots) # limit plot images + ns = np.ceil(bs ** 0.5) # number of subplots (square) + + # Check if we should resize + scale_factor = max_size / max(h, w) + if scale_factor < 1: + h = math.ceil(scale_factor * h) + w = math.ceil(scale_factor * w) + + colors = color_list() # list of colors + mosaic = np.full((int(ns * h), int(ns * w), 3), 255, dtype=np.uint8) # init + for i, img in enumerate(images): + if i == max_subplots: # if last batch has fewer images than we expect + break + + block_x = int(w * (i // ns)) + block_y = int(h * (i % ns)) + + img = img.transpose(1, 2, 0) + if scale_factor < 1: + img = cv2.resize(img, (w, h)) + + mosaic[block_y:block_y + h, block_x:block_x + w, :] = img + if len(targets) > 0: + image_targets = targets[targets[:, 0] == i] + boxes = xywh2xyxy(image_targets[:, 2:6]).T + classes = image_targets[:, 1].astype('int') + labels = image_targets.shape[1] == 6 # labels if no conf column + conf = None if labels else image_targets[:, 6] # check for confidence presence (label vs pred) + + boxes[[0, 2]] *= w + boxes[[0, 2]] += block_x + boxes[[1, 3]] *= h + boxes[[1, 3]] += block_y + for j, box in enumerate(boxes.T): + cls = int(classes[j]) + color = colors[cls % len(colors)] + cls = names[cls] if names else cls + if labels or conf[j] > 0.25: # 0.25 conf thresh + label = '%s' % cls if labels else '%s %.1f' % (cls, conf[j]) + plot_one_box(box, mosaic, label=label, color=color, line_thickness=tl) + + # Draw image filename labels + if paths: + label = Path(paths[i]).name[:40] # trim to 40 char + t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0] + cv2.putText(mosaic, label, (block_x + 5, block_y + t_size[1] + 5), 0, tl / 3, [220, 220, 220], thickness=tf, + lineType=cv2.LINE_AA) + + # Image border + cv2.rectangle(mosaic, (block_x, block_y), (block_x + w, block_y + h), (255, 255, 255), thickness=3) + + if fname: + r = min(1280. 
/ max(h, w) / ns, 1.0) # ratio to limit image size + mosaic = cv2.resize(mosaic, (int(ns * w * r), int(ns * h * r)), interpolation=cv2.INTER_AREA) + # cv2.imwrite(fname, cv2.cvtColor(mosaic, cv2.COLOR_BGR2RGB)) # cv2 save + Image.fromarray(mosaic).save(fname) # PIL save + return mosaic + + +def plot_lr_scheduler(optimizer, scheduler, epochs=300, save_dir=''): + # Plot LR simulating training for full epochs + optimizer, scheduler = copy(optimizer), copy(scheduler) # do not modify originals + y = [] + for _ in range(epochs): + scheduler.step() + y.append(optimizer.param_groups[0]['lr']) + plt.plot(y, '.-', label='LR') + plt.xlabel('epoch') + plt.ylabel('LR') + plt.grid() + plt.xlim(0, epochs) + plt.ylim(0) + plt.tight_layout() + plt.savefig(Path(save_dir) / 'LR.png', dpi=200) + + +def plot_test_txt(): # from utils.general import *; plot_test() + # Plot test.txt histograms + x = np.loadtxt('test.txt', dtype=np.float32) + box = xyxy2xywh(x[:, :4]) + cx, cy = box[:, 0], box[:, 1] + + fig, ax = plt.subplots(1, 1, figsize=(6, 6), tight_layout=True) + ax.hist2d(cx, cy, bins=600, cmax=10, cmin=0) + ax.set_aspect('equal') + plt.savefig('hist2d.png', dpi=300) + + fig, ax = plt.subplots(1, 2, figsize=(12, 6), tight_layout=True) + ax[0].hist(cx, bins=600) + ax[1].hist(cy, bins=600) + plt.savefig('hist1d.png', dpi=200) + + +def plot_targets_txt(): # from utils.general import *; plot_targets_txt() + # Plot targets.txt histograms + x = np.loadtxt('targets.txt', dtype=np.float32).T + s = ['x targets', 'y targets', 'width targets', 'height targets'] + fig, ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True) + ax = ax.ravel() + for i in range(4): + ax[i].hist(x[i], bins=100, label='%.3g +/- %.3g' % (x[i].mean(), x[i].std())) + ax[i].legend() + ax[i].set_title(s[i]) + plt.savefig('targets.jpg', dpi=200) + + +def plot_study_txt(f='study.txt', x=None): # from utils.general import *; plot_study_txt() + # Plot study.txt generated by test.py + fig, ax = plt.subplots(2, 4, figsize=(10, 6), tight_layout=True) + ax = ax.ravel() + + fig2, ax2 = plt.subplots(1, 1, figsize=(8, 4), tight_layout=True) + for f in ['study/study_coco_yolo%s.txt' % x for x in ['s', 'm', 'l', 'x']]: + y = np.loadtxt(f, dtype=np.float32, usecols=[0, 1, 2, 3, 7, 8, 9], ndmin=2).T + x = np.arange(y.shape[1]) if x is None else np.array(x) + s = ['P', 'R', 'mAP@.5', 'mAP@.5:.95', 't_inference (ms/img)', 't_NMS (ms/img)', 't_total (ms/img)'] + for i in range(7): + ax[i].plot(x, y[i], '.-', linewidth=2, markersize=8) + ax[i].set_title(s[i]) + + j = y[3].argmax() + 1 + ax2.plot(y[6, :j], y[3, :j] * 1E2, '.-', linewidth=2, markersize=8, + label=Path(f).stem.replace('study_coco_', '').replace('yolo', 'YOLO')) + + ax2.plot(1E3 / np.array([209, 140, 97, 58, 35, 18]), [34.6, 40.5, 43.0, 47.5, 49.7, 51.5], + 'k.-', linewidth=2, markersize=8, alpha=.25, label='EfficientDet') + + ax2.grid() + ax2.set_xlim(0, 30) + ax2.set_ylim(28, 50) + ax2.set_yticks(np.arange(30, 55, 5)) + ax2.set_xlabel('GPU Speed (ms/img)') + ax2.set_ylabel('COCO AP val') + ax2.legend(loc='lower right') + plt.savefig('study_mAP_latency.png', dpi=300) + plt.savefig(f.replace('.txt', '.png'), dpi=300) + + +def plot_labels(labels, save_dir=''): + # plot dataset labels + c, b = labels[:, 0], labels[:, 1:].transpose() # classes, boxes + nc = int(c.max() + 1) # number of classes + + fig, ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True) + ax = ax.ravel() + ax[0].hist(c, bins=np.linspace(0, nc, nc + 1) - 0.5, rwidth=0.8) + ax[0].set_xlabel('classes') + ax[1].scatter(b[0], b[1], 
c=hist2d(b[0], b[1], 90), cmap='jet') + ax[1].set_xlabel('x') + ax[1].set_ylabel('y') + ax[2].scatter(b[2], b[3], c=hist2d(b[2], b[3], 90), cmap='jet') + ax[2].set_xlabel('width') + ax[2].set_ylabel('height') + plt.savefig(Path(save_dir) / 'labels.png', dpi=200) + plt.close() + + # seaborn correlogram + try: + import seaborn as sns + import pandas as pd + x = pd.DataFrame(b.transpose(), columns=['x', 'y', 'width', 'height']) + sns.pairplot(x, corner=True, diag_kind='hist', kind='scatter', markers='o', + plot_kws=dict(s=3, edgecolor=None, linewidth=1, alpha=0.02), + diag_kws=dict(bins=50)) + plt.savefig(Path(save_dir) / 'labels_correlogram.png', dpi=200) + plt.close() + except Exception as e: + pass + + +def plot_evolution(yaml_file='data/hyp.finetune.yaml'): # from utils.general import *; plot_evolution() + # Plot hyperparameter evolution results in evolve.txt + with open(yaml_file) as f: + hyp = yaml.load(f, Loader=yaml.FullLoader) + x = np.loadtxt('evolve.txt', ndmin=2) + f = fitness(x) + # weights = (f - f.min()) ** 2 # for weighted results + plt.figure(figsize=(10, 12), tight_layout=True) + matplotlib.rc('font', **{'size': 8}) + for i, (k, v) in enumerate(hyp.items()): + y = x[:, i + 7] + # mu = (y * weights).sum() / weights.sum() # best weighted result + mu = y[f.argmax()] # best single result + plt.subplot(6, 5, i + 1) + plt.scatter(y, f, c=hist2d(y, f, 20), cmap='viridis', alpha=.8, edgecolors='none') + plt.plot(mu, f.max(), 'k+', markersize=15) + plt.title('%s = %.3g' % (k, mu), fontdict={'size': 9}) # limit to 40 characters + if i % 5 != 0: + plt.yticks([]) + print('%15s: %.3g' % (k, mu)) + plt.savefig('evolve.png', dpi=200) + print('\nPlot saved as evolve.png') + + +def plot_results_overlay(start=0, stop=0): # from utils.general import *; plot_results_overlay() + # Plot training 'results*.txt', overlaying train and val losses + s = ['train', 'train', 'train', 'Precision', 'mAP@0.5', 'val', 'val', 'val', 'Recall', 'mAP@0.5:0.95'] # legends + t = ['Box', 'Objectness', 'Classification', 'P-R', 'mAP-F1'] # titles + for f in sorted(glob.glob('results*.txt') + glob.glob('../../Downloads/results*.txt')): + results = np.loadtxt(f, usecols=[2, 3, 4, 8, 9, 12, 13, 14, 10, 11], ndmin=2).T + n = results.shape[1] # number of rows + x = range(start, min(stop, n) if stop else n) + fig, ax = plt.subplots(1, 5, figsize=(14, 3.5), tight_layout=True) + ax = ax.ravel() + for i in range(5): + for j in [i, i + 5]: + y = results[j, x] + ax[i].plot(x, y, marker='.', label=s[j]) + # y_smooth = butter_lowpass_filtfilt(y) + # ax[i].plot(x, np.gradient(y_smooth), marker='.', label=s[j]) + + ax[i].set_title(t[i]) + ax[i].legend() + ax[i].set_ylabel(f) if i == 0 else None # add filename + fig.savefig(f.replace('.txt', '.png'), dpi=200) + + +def plot_results(start=0, stop=0, bucket='', id=(), labels=(), save_dir=''): + # from utils.general import *; plot_results(save_dir='runs/train/exp0') + # Plot training 'results*.txt' + fig, ax = plt.subplots(2, 5, figsize=(12, 6)) + ax = ax.ravel() + s = ['Box', 'Objectness', 'Classification', 'Precision', 'Recall', + 'val Box', 'val Objectness', 'val Classification', 'mAP@0.5', 'mAP@0.5:0.95'] + if bucket: + # os.system('rm -rf storage.googleapis.com') + # files = ['https://storage.googleapis.com/%s/results%g.txt' % (bucket, x) for x in id] + files = ['%g.txt' % x for x in id] + c = ('gsutil cp ' + '%s ' * len(files) + '.') % tuple('gs://%s/%g.txt' % (bucket, x) for x in id) + os.system(c) + else: + files = glob.glob(str(Path(save_dir) / '*.txt')) + 
glob.glob('../../Downloads/results*.txt') + assert len(files), 'No results.txt files found in %s, nothing to plot.' % os.path.abspath(save_dir) + for fi, f in enumerate(files): + try: + results = np.loadtxt(f, usecols=[2, 3, 4, 8, 9, 12, 13, 14, 10, 11], ndmin=2).T + n = results.shape[1] # number of rows + x = range(start, min(stop, n) if stop else n) + for i in range(10): + y = results[i, x] + if i in [0, 1, 2, 5, 6, 7]: + y[y == 0] = np.nan # don't show zero loss values + # y /= y[0] # normalize + label = labels[fi] if len(labels) else Path(f).stem + ax[i].plot(x, y, marker='.', label=label, linewidth=1, markersize=6) + ax[i].set_title(s[i]) + # if i in [5, 6, 7]: # share train and val loss y axes + # ax[i].get_shared_y_axes().join(ax[i], ax[i - 5]) + except Exception as e: + print('Warning: Plotting error for %s; %s' % (f, e)) + + fig.tight_layout() + ax[1].legend() + fig.savefig(Path(save_dir) / 'results.png', dpi=200) diff --git a/utils/torch_utils.py b/utils/torch_utils.py new file mode 100644 index 0000000..9bbdc48 --- /dev/null +++ b/utils/torch_utils.py @@ -0,0 +1,240 @@ +# PyTorch utils + +import logging +import math +import os +import time +from contextlib import contextmanager +from copy import deepcopy + +import torch +import torch.backends.cudnn as cudnn +import torch.nn as nn +import torch.nn.functional as F +import torchvision + +logger = logging.getLogger(__name__) + + +@contextmanager +def torch_distributed_zero_first(local_rank: int): + """ + Decorator to make all processes in distributed training wait for each local_master to do something. + """ + if local_rank not in [-1, 0]: + torch.distributed.barrier() + yield + if local_rank == 0: + torch.distributed.barrier() + + +def init_torch_seeds(seed=0): + # Speed-reproducibility tradeoff https://pytorch.org/docs/stable/notes/randomness.html + torch.manual_seed(seed) + if seed == 0: # slower, more reproducible + cudnn.deterministic = True + cudnn.benchmark = False + else: # faster, less reproducible + cudnn.deterministic = False + cudnn.benchmark = True + + +def select_device(device='', batch_size=None): + # device = 'cpu' or '0' or '0,1,2,3' + cpu_request = device.lower() == 'cpu' + if device and not cpu_request: # if device requested other than 'cpu' + os.environ['CUDA_VISIBLE_DEVICES'] = device # set environment variable + assert torch.cuda.is_available(), 'CUDA unavailable, invalid device %s requested' % device # check availablity + + cuda = False if cpu_request else torch.cuda.is_available() + if cuda: + c = 1024 ** 2 # bytes to MB + ng = torch.cuda.device_count() + if ng > 1 and batch_size: # check that batch_size is compatible with device_count + assert batch_size % ng == 0, 'batch-size %g not multiple of GPU count %g' % (batch_size, ng) + x = [torch.cuda.get_device_properties(i) for i in range(ng)] + s = f'Using torch {torch.__version__} ' + for i in range(0, ng): + if i == 1: + s = ' ' * len(s) + logger.info("%sCUDA:%g (%s, %dMB)" % (s, i, x[i].name, x[i].total_memory / c)) + else: + logger.info(f'Using torch {torch.__version__} CPU') + + logger.info('') # skip a line + return torch.device('cuda:0' if cuda else 'cpu') + + +def time_synchronized(): + torch.cuda.synchronize() if torch.cuda.is_available() else None + return time.time() + + +def is_parallel(model): + return type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel) + + +def intersect_dicts(da, db, exclude=()): + # Dictionary intersection of matching keys and shapes, omitting 'exclude' keys, using da values + return {k: v for k, v in 
da.items() if k in db and not any(x in k for x in exclude) and v.shape == db[k].shape} + + +def initialize_weights(model): + for m in model.modules(): + t = type(m) + if t is nn.Conv2d: + pass # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif t is nn.BatchNorm2d: + m.eps = 1e-3 + m.momentum = 0.03 + elif t in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6]: + m.inplace = True + + +def find_modules(model, mclass=nn.Conv2d): + # Finds layer indices matching module class 'mclass' + return [i for i, m in enumerate(model.module_list) if isinstance(m, mclass)] + + +def sparsity(model): + # Return global model sparsity + a, b = 0., 0. + for p in model.parameters(): + a += p.numel() + b += (p == 0).sum() + return b / a + + +def prune(model, amount=0.3): + # Prune model to requested global sparsity + import torch.nn.utils.prune as prune + print('Pruning model... ', end='') + for name, m in model.named_modules(): + if isinstance(m, nn.Conv2d): + prune.l1_unstructured(m, name='weight', amount=amount) # prune + prune.remove(m, 'weight') # make permanent + print(' %.3g global sparsity' % sparsity(model)) + + +def fuse_conv_and_bn(conv, bn): + # Fuse convolution and batchnorm layers https://tehnokv.com/posts/fusing-batchnorm-and-conv/ + fusedconv = nn.Conv2d(conv.in_channels, + conv.out_channels, + kernel_size=conv.kernel_size, + stride=conv.stride, + padding=conv.padding, + groups=conv.groups, + bias=True).requires_grad_(False).to(conv.weight.device) + + # prepare filters + w_conv = conv.weight.clone().view(conv.out_channels, -1) + w_bn = torch.diag(bn.weight.div(torch.sqrt(bn.eps + bn.running_var))) + fusedconv.weight.copy_(torch.mm(w_bn, w_conv).view(fusedconv.weight.size())) + + # prepare spatial bias + b_conv = torch.zeros(conv.weight.size(0), device=conv.weight.device) if conv.bias is None else conv.bias + b_bn = bn.bias - bn.weight.mul(bn.running_mean).div(torch.sqrt(bn.running_var + bn.eps)) + fusedconv.bias.copy_(torch.mm(w_bn, b_conv.reshape(-1, 1)).reshape(-1) + b_bn) + + return fusedconv + + +def model_info(model, verbose=False, img_size=640): + # Model information. img_size may be int or list, i.e. 
img_size=640 or img_size=[640, 320] + n_p = sum(x.numel() for x in model.parameters()) # number parameters + n_g = sum(x.numel() for x in model.parameters() if x.requires_grad) # number gradients + if verbose: + print('%5s %40s %9s %12s %20s %10s %10s' % ('layer', 'name', 'gradient', 'parameters', 'shape', 'mu', 'sigma')) + for i, (name, p) in enumerate(model.named_parameters()): + name = name.replace('module_list.', '') + print('%5g %40s %9s %12g %20s %10.3g %10.3g' % + (i, name, p.requires_grad, p.numel(), list(p.shape), p.mean(), p.std())) + + try: # FLOPS + from thop import profile + flops = profile(deepcopy(model), inputs=(torch.zeros(1, 3, img_size, img_size),), verbose=False)[0] / 1E9 * 2 + img_size = img_size if isinstance(img_size, list) else [img_size, img_size] # expand if int/float + fs = ', %.9f GFLOPS' % (flops) # 640x640 FLOPS + except (ImportError, Exception): + fs = '' + + logger.info(f"Model Summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}") + + +def load_classifier(name='resnet101', n=2): + # Loads a pretrained model reshaped to n-class output + model = torchvision.models.__dict__[name](pretrained=True) + + # ResNet model properties + # input_size = [3, 224, 224] + # input_space = 'RGB' + # input_range = [0, 1] + # mean = [0.485, 0.456, 0.406] + # std = [0.229, 0.224, 0.225] + + # Reshape output to n classes + filters = model.fc.weight.shape[1] + model.fc.bias = nn.Parameter(torch.zeros(n), requires_grad=True) + model.fc.weight = nn.Parameter(torch.zeros(n, filters), requires_grad=True) + model.fc.out_features = n + return model + + +def scale_img(img, ratio=1.0, same_shape=False): # img(16,3,256,416), r=ratio + # scales img(bs,3,y,x) by ratio + if ratio == 1.0: + return img + else: + h, w = img.shape[2:] + s = (int(h * ratio), int(w * ratio)) # new size + img = F.interpolate(img, size=s, mode='bilinear', align_corners=False) # resize + if not same_shape: # pad/crop img + gs = 32 # (pixels) grid size + h, w = [math.ceil(x * ratio / gs) * gs for x in (h, w)] + return F.pad(img, [0, w - s[1], 0, h - s[0]], value=0.447) # value = imagenet mean + + +def copy_attr(a, b, include=(), exclude=()): + # Copy attributes from b to a, options to only include [...] and to exclude [...] + for k, v in b.__dict__.items(): + if (len(include) and k not in include) or k.startswith('_') or k in exclude: + continue + else: + setattr(a, k, v) + + +class ModelEMA: + """ Model Exponential Moving Average from https://github.com/rwightman/pytorch-image-models + Keep a moving average of everything in the model state_dict (parameters and buffers). + This is intended to allow functionality like + https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage + A smoothed version of the weights is necessary for some training schemes to perform well. + This class is sensitive where it is initialized in the sequence of model init, + GPU assignment and distributed training wrappers. 
+ """ + + def __init__(self, model, decay=0.9999, updates=0): + # Create EMA + self.ema = deepcopy(model.module if is_parallel(model) else model).eval() # FP32 EMA + # if next(model.parameters()).device.type != 'cpu': + # self.ema.half() # FP16 EMA + self.updates = updates # number of EMA updates + self.decay = lambda x: decay * (1 - math.exp(-x / 2000)) # decay exponential ramp (to help early epochs) + for p in self.ema.parameters(): + p.requires_grad_(False) + + def update(self, model): + # Update EMA parameters + with torch.no_grad(): + self.updates += 1 + d = self.decay(self.updates) + + msd = model.module.state_dict() if is_parallel(model) else model.state_dict() # model state_dict + for k, v in self.ema.state_dict().items(): + if v.dtype.is_floating_point: + v *= d + v += (1. - d) * msd[k].detach() + + def update_attr(self, model, include=(), exclude=('process_group', 'reducer')): + # Update EMA attributes + copy_attr(self.ema, model, include, exclude)