From 81bbf66aab8af5b76771c081b411e0b674849c55 Mon Sep 17 00:00:00 2001 From: Mathias Claassen Date: Fri, 3 Mar 2023 10:07:54 -0300 Subject: [PATCH] Initial codebase (#1) * Add project code * Logger improvements * Improvements to web demo code * added create_wlasl_landmarks_dataset.py and xtract_mediapipe_landmarks.py * Fix rotation augmentation * fixed error in docstring, and removed unnecessary replace -1 -> 0 * Readme updates * Share base notebooks * Add notebooks and unify for different datasets * requirements update * fixes * Make evaluate more deterministic * Allow training with clearml * refactor preprocessing and apply linter * Minor fixes * Minor notebook tweaks * Readme updates * Fix PR comments * Remove unneeded code * Add banner to Readme --------- Co-authored-by: Gabriel Lema --- .flake8 | 6 + Dockerfile | 13 + README.md | 137 +++++ __init__.py | 0 assets/accuracy.png | Bin 0 -> 10730 bytes assets/banner.png | Bin 0 -> 41338 bytes assets/scatter_plot.png | Bin 0 -> 125899 bytes augmentations/__init__.py | 1 + augmentations/augment.py | 228 ++++++++ conversion_requirements.txt | 21 + convert.py | 123 +++++ datasets/__init__.py | 3 + datasets/clearml_dataset_loader.py | 8 + datasets/czech_slr_dataset.py | 72 +++ datasets/dataset_loader.py | 17 + datasets/datasets_utils.py | 133 +++++ datasets/embedding_dataset.py | 103 ++++ models/__init__.py | 4 + models/spoter_embedding_model.py | 41 ++ models/spoter_model.py | 66 +++ models/utils.py | 280 ++++++++++ normalization/blazepose_mapping.py | 92 ++++ normalization/body_normalization.py | 241 +++++++++ normalization/hand_normalization.py | 195 +++++++ normalization/main.py | 47 ++ notebooks/embeddings_evaluation.ipynb | 411 +++++++++++++++ notebooks/visualize_embeddings.ipynb | 491 ++++++++++++++++++ preprocessing.py | 21 + preprocessing/__init__.py | 0 .../create_wlasl_landmarks_dataset.py | 155 ++++++ preprocessing/extract_mediapipe_landmarks.py | 154 ++++++ requirements.txt | 14 + tests/__init__.py | 0 
tests/test_batch_sorter.py | 104 ++++ tracking/__init__.py | 0 tracking/clearml_tracker.py | 21 + tracking/tracker.py | 28 + train.py | 287 ++++++++++ train.sh | 24 + training/__init__.py | 0 training/batch_sorter.py | 215 ++++++++ training/batching_scheduler.py | 62 +++ training/gaussian_noise.py | 18 + training/online_batch_mining.py | 105 ++++ training/train_arguments.py | 84 +++ training/train_utils.py | 71 +++ utils.py | 40 ++ web/README.md | 8 + web/index.html | 61 +++ 49 files changed, 4205 insertions(+) create mode 100644 .flake8 create mode 100644 Dockerfile create mode 100644 README.md create mode 100644 __init__.py create mode 100644 assets/accuracy.png create mode 100644 assets/banner.png create mode 100644 assets/scatter_plot.png create mode 100644 augmentations/__init__.py create mode 100644 augmentations/augment.py create mode 100644 conversion_requirements.txt create mode 100644 convert.py create mode 100644 datasets/__init__.py create mode 100644 datasets/clearml_dataset_loader.py create mode 100644 datasets/czech_slr_dataset.py create mode 100644 datasets/dataset_loader.py create mode 100644 datasets/datasets_utils.py create mode 100644 datasets/embedding_dataset.py create mode 100644 models/__init__.py create mode 100644 models/spoter_embedding_model.py create mode 100644 models/spoter_model.py create mode 100644 models/utils.py create mode 100644 normalization/blazepose_mapping.py create mode 100644 normalization/body_normalization.py create mode 100644 normalization/hand_normalization.py create mode 100644 normalization/main.py create mode 100644 notebooks/embeddings_evaluation.ipynb create mode 100644 notebooks/visualize_embeddings.ipynb create mode 100644 preprocessing.py create mode 100644 preprocessing/__init__.py create mode 100644 preprocessing/create_wlasl_landmarks_dataset.py create mode 100644 preprocessing/extract_mediapipe_landmarks.py create mode 100644 requirements.txt create mode 100644 tests/__init__.py create mode 100644 
tests/test_batch_sorter.py create mode 100644 tracking/__init__.py create mode 100644 tracking/clearml_tracker.py create mode 100644 tracking/tracker.py create mode 100644 train.py create mode 100755 train.sh create mode 100644 training/__init__.py create mode 100644 training/batch_sorter.py create mode 100644 training/batching_scheduler.py create mode 100644 training/gaussian_noise.py create mode 100644 training/online_batch_mining.py create mode 100644 training/train_arguments.py create mode 100644 training/train_utils.py create mode 100644 utils.py create mode 100644 web/README.md create mode 100644 web/index.html diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..c32d95e --- /dev/null +++ b/.flake8 @@ -0,0 +1,6 @@ +[flake8] +max-line-length = 130 +per-file-ignores = + __init__.py: F401 +exclude = + .git,__pycache__, diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..93e7add --- /dev/null +++ b/Dockerfile @@ -0,0 +1,13 @@ +FROM pytorch/pytorch + +WORKDIR /app +COPY ./requirements.txt /app/ + +RUN pip install -r requirements.txt +RUN apt-get -y update +RUN apt-get -y install git +RUN apt-get install ffmpeg libsm6 libxext6 -y + +COPY . /app/ +RUN git config --global --add safe.directory /app +CMD ./train.sh diff --git a/README.md b/README.md new file mode 100644 index 0000000..f3e99e6 --- /dev/null +++ b/README.md @@ -0,0 +1,137 @@ + + + +# SPOTER Embeddings + +This repository contains code for the Spoter embedding model. + +The model is heavily based on [Spoter] which was presented in +[Sign Pose-Based Transformer for Word-Level Sign Language Recognition](https://openaccess.thecvf.com/content/WACV2022W/HADCV/html/Bohacek_Sign_Pose-Based_Transformer_for_Word-Level_Sign_Language_Recognition_WACVW_2022_paper.html) with one of the main modifications being +that this is an embedding model instead of a classification model. +This allows for several zero-shot tasks on unseen Sign Language datasets from around the world. 
+ + +## Modifications on [SPOTER](https://github.com/matyasbohacek/spoter) +Here is a list of the main modifications made on Spoter code and model architecture: + +* The output layer is a linear layer but trained using triplet loss instead of CrossEntropyLoss. The output of the model +is therefore an embedding vector that can be used for several downstream tasks. +* We started using the keypoints dataset published by Spoter but later created new datasets using BlazePose from Mediapipe (as it is done in [Spoter 2](https://arxiv.org/abs/2210.00893)). This improves results considerably. +* We select batches in a way that they contain several hard triplets and then compute the loss on all hard triplets found in each batch. +* Some code refactoring to acomodate new classes we implemented. +* Minor code fix when using rotate augmentation to avoid exceptions. + + + + +## Results + +![Scatter plot of dataset embeddings](/assets/scatter_plot.png) + +We used the silhouette score to measure how well the clusters are defined during the training step. +Silhouette score will be high (close to 1) when all clusters of different classes are well separated from each other, and it will be low (close to -1) for the opposite. +Our best model reached 0.7 on the train set and 0.1 on validation. + +### Classification accuracy +While the model was not trained with classification specifically in mind, it can still be used for that purpose. +Here we show top-1 and top-5 classifications which are calculated by taking the 1 (or 5) nearest vector of different classes, to the target vector. + +To estimate the accuracy for LSA, we take a “train” set as given and then classify the holdout set based on the closest vectors from the “train” set. +This is done using the model trained on WLASL100 dataset only, to show how our model has zero-shot capabilities. + +![Accuracy table](/assets/accuracy.png) + + + + +## Get Started + +The recommended way of running code from this repo is by using **Docker**. 
+ +Clone this repository and run: +``` +docker build -t spoter_embeddings . +docker run --rm -it --entrypoint=bash --gpus=all -v $PWD:/app spoter_embeddings +``` + +> Running without specifying the `entrypoint` will train the model with the hyperparameters specified in `train.sh` + +If you prefer running in a **virtual environment** instead, then first install dependencies: + +```shell +pip install -r requirements.txt +``` + +> We tested this using Python 3.7.13. Other versions may work. + +To train the model, run `train.sh` in Docker or your virtual env. + +The hyperparameters with their descriptions can be found in the [train.py](link...) file. + + +## Data + +Same as with SPOTER, this model works on top of sequences of signers' skeletal data extracted from videos. +This means that the input data has a much lower dimension compared to using videos directly, and therefore the model is +quicker and lighter, while you can choose any SOTA body pose model to preprocess video. +This makes our model lightweight and able to run in real-time (for example, it takes around 40ms to process a 4-second +25 FPS video inside a web browser using onnxruntime) + +![Sign Language Dataset Overview](http://spoter.signlanguagerecognition.com/img/datasets_overview.gif) + +For ready to use datasets refer to the [Spoter] repository. + +For best results, we recommend building your own dataset by downloading a Sign language video dataset such as [WLASL] and then using the `extract_mediapipe_landmarks.py` and `create_wlasl_landmarks_dataset.py` scripts to create a body keypoints datasets that can be used to train the Spoter embeddings model. 
+ +You can run these scripts as follows: +```bash +# This will extract landmarks from the downloaded videos +python3 preprocessing.py extract -videos --output-landmarks + +# This will create a dataset (csv file) with the first 100 classes, splitting 20% of it to the test set, and 80% for train +python3 preprocessing.py create -videos -lmks --dataset-folder= --create-new-split -ts=0.2 +``` + + +## Example notebooks +There are two Jupyter notebooks included in the `notebooks` folder. +* embeddings_evaluation.ipynb: This notebook shows how to evaluate a model +* visualize_embeddings.ipynb: Model embeddings visualization, optionally with embedded input video + + +## Tracking experiments with ClearML +The code supports tracking experiments, datasets, and models in a ClearML server. +If you want to do this make sure to pass the following arguments to train.py: + +``` + --dataset_loader=clearml + --tracker=clearml +``` + +Also make sure to correctly configure your clearml.conf file. +If using Docker, you can map it into Docker adding these volumes when running `docker run`: + +``` +-v $HOME/clearml.conf:/root/clearml.conf -v $HOME/.clearml:/root/.clearml +``` + +## Model conversion + +Follow these steps to convert your model to ONNX, TF or TFlite: +* Install the additional dependencies listed in `conversion_requirements.txt`. This is best done inside the Docker container. +* Run `python convert.py -c `. Add `-tf` if you want to export TensorFlow and TFlite models too. +* The output models should be generated in a folder named `converted_models`. + +> You can test your model's performance in a web browser. Check out the README in the [web](/web/) folder. + + +## License + +The **code** is published under the [Apache License 2.0](./LICENSE) which allows for both academic and commercial use if +relevant License and copyright notice is included, our work is cited and all changes are stated. 
+ +The license for the [WLASL](https://arxiv.org/pdf/1910.11006.pdf) and [LSA64](https://core.ac.uk/download/pdf/76495887.pdf) datasets used for experiments is, however, the [Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)](https://creativecommons.org/licenses/by-nc/4.0/) license which allows only for non-commercial usage. + + +[Spoter]: (https://github.com/matyasbohacek/spoter) +[WLASL]: (https://dxli94.github.io/WLASL/) \ No newline at end of file diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/assets/accuracy.png b/assets/accuracy.png new file mode 100644 index 0000000000000000000000000000000000000000..18095a9af04df961335818c76c015e414525612f GIT binary patch literal 10730 zcmc(_XH-*N@Gl%xEHuRif`G!KAV`&>p$S4z0i|~ms`T&(BuGm@1eK~%1tADX3C#cr z1_(g`QIJ3&lmHg{SV@VyC?=;ebW4@{i zSuxjp?XEbhRr7=Xslo8P=TcBoIDhKHWl=32`SZbjD?iSO{n9g-%e?yPHCen><`yVMh-F$@jyaeDl0PrvYzyknWQ#__{#AyA$!@#^&0=!uv zq#&~#8}>4&*NQw4GW#fNxW`CW#q`ta>4Qas3@y?!{qXg2H?d*IznZmws2}?~uY9Wk z&akF#Up~CO$3!6~oVR0E#R117FCTSn*BIa97I<+Ba9p*aKaJS^b@yB($9?Wu`az)D zUrs>Fwg39f%xD<^VrAW7o6J2n(Pz-#&D^}-uz52qfu58+C@^>jAlP#u>@b*qc-Wps z^?f#@R>N4*CLM~^&G>}wsntY;+=si)BOsFPLSMV)b#9jh{M|u)Q}Rv{%_imE+o)19 zx-8)YxT(8nmhNAHX9gRyV(kwa!?yiBqI*Nj)1I7DzQeMoW&|T|5Ng~i96~4Ts|kaye2ws(u9mi|o6B~2vm@bb5CuH)q+VJ`3_&J=;*=P<)@b z856f(Wqv=s#R5qD&== zYFA~C-3k^=o`noY4F#18?dH6AYR>#b#&Qgkt+?d|C-nQP8xja*U=l=tAT06tzy1| z&T*8t-w;h*ICw?+beQy6p5Lcb!T*6nkA7q3vKYgh#K%Dk!7qb-fLMGi?Tud&r*C^T z`kJ@%L_8L0E*j;-byHF3zX!a(jFzfQJ44vb?sDOW@@m^!j$ z)BJbIhi969#sheoZA#ZyDwK`Bl&5_F4R~dx->O2c7F(ot#)pGfG=I5U2A}wb{dE0$ zuL(e~Y|V1x&~6XLco+rG+})x$#K@pbMYQ>gcv1*gN9b_bGOp#?7BcGDZF?`gjabW( zIZ&{w|FEsD3iI^nt~_}}&#{45WnWfipTsZ@ulpr)`qovW?a$2*9qv*gK`8B1`cn9{ zM@ym$9YczoXPlP6FD9IWIiX`+Ra^eFgIf8z51`W>Irsbhz~KuTwN4@Qrf$5~#fnL? 
z=Z|ac$8grRj=>xmV)vzUVnEBa-|ug~YJ>&)wQNWU$ao$7tb*z)GaWKhkch=NBJ{DI znpBeU=~9hHYTiXJmYO2&r!Xw6RPu{m(*objvnVu;-1-kE&EEk&+zs^w0`UdF@0qQ| zk3UfLM1&q$kblCZ%it+Y8iGKArX2<8-Oi*dQnGYTfSU~@a!{Jol2Tr;->CZ_`p*Bc}357M@E9Y0yY_PB5u82hV!`n()bEzPO?X!XGGO(~QZpO>v%Tbl2GUKD*%SOstr9ZFK6s2M zZ#%FF1#2Ekm6wO7GPHN{_t`s`+3J?px!#*YF)3B}LHi=EfTft=YR6Sh$D*b`4Ei@RrFRq(vC_M}iQA*I#jdYkqR{Okn(W7Q6Q=hYJi3|RP>X`V zbkg)%*9bLx_KB*hF1IA4`;Z+<$FzyYrMQa<#jMU{jF@pF>}&HA&ZNAo*7+uJc0AnDl~^L2O*9Y0;& zqyKLT8n;f$$-jyUTd!W+Ce8YJ;Hy^X(cprP0?~qi5>z{1tdquvlTJV~aFy${vJ1s6 zWOtUvj31xq`{~pwdWYzL@Sbq&r0EwE_g)|0tYMSMu#F*^O4rQp9hxj?c1&h!?^w6U}zot??S+bKCXAc-|!COF?rhZe2*8@d@`l z2G^Uqhr!%M8&!L4?}~7Jdt95E(w)-?OYM|3p2UU9Bo+vKiiF}$eo~h3^JKc9QLp~a z<%{!Es*d^%yIW6=qpMsf)6v82bD65zTFWKi#B%A1+;$;;XtG8?e9 z@pe3L@y?Fv*n625{g{gf&g*uI3DU;X?mp+Hf#-bV6+&wc*ls2(|z_I3AWy5}(m+1Dl<$5-Slpi{8 z)Cchp>egI0k-~SS%pH4TR`NEFtsbeDekcGYwae8nCt8~>KL`%^1@N+c}TLNX$wWdM; zE)F{j7oH=WwN%x%dH!C_CKM4r9GHJZ#~|W z?L`%tmT2P(_9B@dL%wY>Yx~M&8x#T-9mRpk+Dqk!arhT&)Hbl^!tb%cwC36m*t0d@ z94irW&^u$;b>8e}Dd_l?kFiQSZS#LeM`{mBN`2dmj@wA7uusD`Zy4v)sYf}} zDDRSXm-`3f4`O!d_yfr9=?u93Lhv+Jb!pGoXa46t&Df$QDzXKnxTywx4R_*ts)OC+ z#2Gz1e~@_M^R(LVZKwDhOFq8~k%~o9y1~zrHn>-76aI4QzI zmMYR{K<}7>Uz_8f(0QqYlfNL*Q#JfIRSxN&85r69<~_ka_d3UWnj&~Go`GecgF8k-I}ruWt8$=J`72nc~brb13c2mt2$SHUCqoKQ@f^{rW9^5al{? 
z1eWQp|LZPf?C+is>qyhILbLDG9Fn6%YJ!R01Il`G1MAklwi0dgLaWNpM&B|j`CVfRu^uJj+KY1m14;+i!t9Ud!U zC=+b{E$tGURgcQ1WtcG^KrBD*D%xG(ZoX}@8huN9KceJ5^_OkPDR;*nRcQn%(}kZw z7|-Cs7I~a}wCuT|Wr3G%VgGQc@9@)knyq{VCz`Jzi!Do;tLoZf>j;>+u7}{v5v-|q zh*K|c;@nN-xhmOLN%+(AmD6@!YA=0+pFR_YKe4*ONSKfh+7i$nS$-ICbI;l*k)%>s z{e8UrHu|uX%WgSn*|rpaQHg8K^R40SRa^e+gU za8{fG)%VIPyuN~>hgrtX`bb)?I$(yddAC8^Z3HProhm zPV#5{#NFP*`0A;`A2%8|-^2*~3+E^z;ktg6->~anTpJu7BB{V$am@vNCnF)+%O@1Q zjpv2_u+kg6f{hcAg@~bX!`j^k%)_ORW14RmP37q~^{0E_3X2d{q=E;4O zaX+7hcG|DBH!7TLW#(vR-&-dIpmtW)8~mHEc_UTbD4!4=XY*c+m+6ds<@W@~cOo@V zsg1jdawP;ihZp$$Ktn-$-yicx2wEOG44ak!iGps!xu~EuR}T?it}q85D+B&;>d&u} z21uem{5y!D%PMA^6LIs<_-mvLsHx>e92dCr!QC3~t??gfP<7cYQxij+7uTJw$v~B4 z$ahw)8}b%dAR5kvg(+(PUL=!_Y?gi44OmqFwYu4(@m8*Y$=D08fwPCCW}1?J9-$q8 zCGO`xxA<;tnQtjf;7(Mi!VYP~d_Tfy{=k>~jgcBh9rLrsvw1P?{WiNI`g7@*ED!AE zY&rW68DkFUb6HuZ9o_LHnJVSWqlF~03w3XE7&|6gD*ZW!r&b5MQ9y|A=c+^YoQ@CbWIJ1ui{M)zj zmHWa-i9>dd#b{LMZ znVOIuvzK~+5rQ((LO|mB zc71_wi;B`i{n^FjwNh;Va)f}`{E}Uwys%uiqvXz7=L5{G*%s*TV7JQ4DX{_v_5*ht z^GUAWrMrapbzzB{ogPs|W1ZNtDvtt}_Dh2y!SFCU5zN-978z&=>_?!;DaM)y@H1H{}?@M>}Es4cb(MmL#9%;JP@9ObI^q*xD`OZ#4`+}y|9@H%H-{&$( z`T~(n&zIL?ji%vgFLrv^C1e9j7sp5QHf3LGkKs}tN9f)qkPGIjsV*+? 
zEwWRHCoq;>1v4wl;m4VLB$rUX(30U0*WDeowcX90)TyC#F^ap9m>$^+_3H@?#}+2| z+XE8GU-z}7SK=HyKCkEHbqo{hm4vv)Ee#Ho>OYoV&DF9`eSma|oSuk+6f`i*w9bEaEq!iQe{7(^*Aea`aHY_9h_HF!s{G|T zroA;)#PL8%Hm1`%cTirWp^NpXy^E;A7d1FDRcDKFFEPGC(P8b)*v_FtYR^?!P!@Bfqq679g1VE0kahz^7d*5BpVR0@T( z#C-?V-Bi2(o(B-S*_jqb?S`^OBMxJ{u(HX|%p0^ucJ5*giQo2h##AGwM7Ps4lii0j zPHI-o=*$IgjBTj=nvI5kIOU{$ch_#Kugs7dD6f%&jF9j#yfoflKbHUXE)a#~f{W+Bz<9u}P8X^qC zl2?O9Rdr#G%!zgZI!wfbnsREUdN%hEG`(-uu(d$tj$nKkF3d=dxU)a}-&Ph$Vb+qu zJlN@NSeBtMOXT~WgLXqLGlG`H93wP^DKTpF-$XeBk=YCxF+3aZki0QAyjA+9<_oCV z;{K7??~aafl^Paaqn3{l?ggl_udLPHFzWVGWCKojm-4R=C;WudeRXuU0Lc+fFT*5N zgk`^lS><0_lL**W2j+X7j-#KAOlvI0<;1alKo6tsN(gga=G#!(-*q%}8-XhdLm$8ZD63U(vzu$p+uO-%hDeZZM-)-|1p}uZ$ z-_8?Gs=-TGur()MrG zdRnrzq@NFd@A%iJ4MhKSJ4(nSet^PXEpPpvmCyT4%Z+yLu1~ADz;iFX=uh+Gb_fD* zsz@1UB)M!DO4ykhNvg`1Vt}Rd{b+g`YkX>PdA(O(pa2q0RpPMmXA^ey%9>d#f9HS3 zRG}T%&>Iodv>t`D(=SYFKZYt>WsID-U67{&G+Q=e)sIo$+w8kCzl#=Z=aWiCp>rM! z%?Fzht|8@|Tb7GS3K~cY#jVEl=tQVf@5+muL+8%GY!&;icH01Y`;%24rw1JpFQC@` z9zlQ3k}gUuV7=`!cyC6JC~;(}IBDCnLn!oi2UG6_y(Yv?DajQ+WBqs^NOToCU*c+> z4D~aL$2t)Bzt(bOCQ`g=M?5%El^tJDPe0n#?Sa4bvM@xwz3kyrZ*HFO_f+wb-9W38 zw!6c#$9)nGw5}bY>YNgICYlu5h|u=Mi}nx83$uC^T{<)R--OmKl%k43h7t51CBX&KT~^i=L^+fl+zG%e3$i;)B(Eb z0;(}+6JzkfpaV@qLYQ*|dHY9qi^7t6)9uX)V_Jn2BzC-#EzZ7@1 z#-+U=aj7-OnkX;Dre_CK8V0%uthe2MygxsM^T917nX7bx4XW$~Ou59dpY?<-Q-}4V zG7;NrWdC|%A4YaZ=vgXXjBE|JSHQ9{rf(2?NWrUdsL244w{$)MuI?-8jbbcB``neCO{uD9}xc zvp~ckj=bv9Nc2^y1;x;#DXY<|LF{5hnk%6f!V_DQ4v9@oxi^%3#jOlgCaNS%uanaZ zv+y!~jfm;hy^nnM z+&85grr(daNDMNcMGLxhS-rV&Omyq6yA`UqNNdsTK3Ob@lui&R~hG{E<5(y@W0`yj8SNK{q~Qle-zS@q8^A$K@++z?ixXj+!BVZnQ- zQ0KzW8DItuXUgRIkYSbReIet)Fbn%}rI*3OmlIz}iJ2`)Wg3rsgDLL@3HHWmR>3|l zv5fYzT;TVn)|^^FWD0%mu22M5R|(}xP?yz5BFHauGBvI5%U3VATAVN7nZ>o-PxG9v z3)jj&r<$C*+;?s9xpWWO;mEyx#}%s4WPeoT{5GifC5L1K-5x!~zN0~B@66dfMS9rM zD2Z?xbV_BGxP|@aq%QcqpKcm;lf%EFE`DsO+Flcyk&t)$E|zTi$-1+UDR-9?9@g^n zj3Lp*{7=1rr#VT1{tH9|8Xf~h2rD?AuBeC@4)(WR6u_n%pzthy3D-&yGGglkne__EHpZ(F>FLgIOO?az%TW?0d_{vTNjU(FbJ!?ddD){ 
zbt70IwEc^}MFAvYUS=y98u|DsJaXI4!sFBE^iT5zPrj?w-ZpXn{`(6s&xIGB`e4a z>-el!rYcmR`kPtqC%Y(BWJm6X&-|>VeO_Ov9uC`<^~+Dagu8e%wt_0%F;m~L;-5e0 ziu73Qt@lx)eaIS`91TP+inY&5+F_L{$-qvYJRQCW@WRv0x={JY1`oW`;5Z||Kv&;+ ze4lAd2gAg6QH70Y%IJ|5(6unZ>!^KI#JKm>M)wmaXRH&AnW@Zun;Tm&@5iPhuuWMz zFw8GG@#>qz>QagA!I!0XYVNn5Yug&gI8$l2|Cu-hhP=Uk5;H}*xFrbC;| zuNGhUXSl?hC(-3?M=@M)Pxq~rUv1?OCw_=E0{wU981BZvJzVXLV+tc%l__GvkulJn z#aVWovApvLQst9l-7}t{W#4^*{fqEP!cUmvK0WF*|j&(ish^?nU9B0%&F|o*Q;p;peYGA5<7#w#R!&>8G2OYZnK6 zra2vW9n4cJA}bnx^~-#+tP?LS@H#Vc>j5D4RE}JvMcqZce%YqY387E2dF8XY4&i^!19mJTO}Gam$z1n%gF*ANY%NEvJ3da{E#bVwwCWq zg2A%w8AvdTwqiWpf5Xj>N*wx`AgFpSZ_tGA+wc^c68a!bO*0q|E&-Pzc`H8l=^p&0 zC%UV^Th9vnz181ApT<4xr9l`wrB1aKkDVG!3p0!5q#gSz9-@aVn?xbtp0gUB4!9Lk z+q(hTn1#>Cv-p6B8eS!Iv7Op^JGy)Fw)rW+3|}Rt-iBpx8?ES05S6?>SUTG$zKh1S zJvBH7rVy5azj}V<(RzzC^rwc)-;jezs4f7DZ__{#~%2_GuT44$1Nh=L%3LcIY%L~xviD`C+i&HO-(K>U9Y!G zqA8ZbSc)VSI$D&lpZ&3;bk8i?Tg@`K^?uEigj0c=b!Qf8fG_>;&8Fw^LL3RP#>{vm zijXYVn(YVu%7?2V|a837~tBYzUS1%jT-)Sey1V4D(Y{qsl5fCtl-@yppspJ!#E zFzjYG7J?WV2;La!{jWBq*{ZfeY2v6F#Fz8`Tcq+oUi$I>7e1uT@y`e`@827CkB*9G O0FU&a=vC@Ez5HL2n%B?( literal 0 HcmV?d00001 diff --git a/assets/banner.png b/assets/banner.png new file mode 100644 index 0000000000000000000000000000000000000000..641f630ed3a98397a5093cf965ae9454132539ee GIT binary patch literal 41338 zcmeFYc{r5s|296PyiqAaSz9bcNQJUZNVY7=jGaREecwl+l2Ak<24$PUWE<;@rI?VN z8T(kXjIlG!n3-q1Ki}W?_#Mad-}Copj>F-YoBO)2`*mH{>pEZOd0z3xhT3P?`Pl&g zz!_bghb903Ycv4BEdI|a#+6F^=M=^-HXj`ee*l1k`}l_mkool@<06y4iM9ryyjO6A z@rA`%-9Q}xs7U5GdVUfB$m7&~sBRj}O~2=HoA|;H(ouT%LEP?#&75%AM~Y3ORLN)=N;1Pv=z#9{#9Z zS+S^9@EuzgH)~&o$yfi4N%T3cvA%08v4GEn}JxN`Q z#-Mc+cBgd7!imw#|9<`(f&WI}zY+Lv1pXU=|3=`y5%_P`W?C!yQ0hf7`*Qrk6&3?Wjk?vXD-L}EdqB#W5kS~C~bTX3UtnD zj_Lvzli8@+MdXxwTU!n(v>tg-XH znXbJ+>7i&)+lYr{U({71)cuWnFHB9{T1@@)@uOT6rCnsoB#F0|f>w9n5tXDyE8vfc zGRQxS4hv>mznEt&ulb2Ubc)5KPCZR+9(Q2e6Zw|q79il|wsC>Ap2u+gf-rQs%=+d4 zoNarx208)+?sdKhA}_qtr*gYCmRb@|Qc?{*oI6IAfNGNrQiJ29wv?jm-K`0oPJE2o 
z`H0Pyz!h21&Jf~bqT1g&#@pjR08T}pZ+V*DY$biT}{FW8%aQUN@tj;<33vlNo?OWC1}D6zb#(Bm6)LuNz( zfk2ATmkU3f!yfIAF62owUZP)H<__S2cIUp*c#ddL@_ofzmr5`YveV1GpN7b;oM@(Q z<%1t+lkW)N3b~gN2MfMATy2?`oAMwT)3jk z_bF~+W1a#DA87C4$h(iA<-Dn#Diz)QJyST5lcBT?f5RBAT%&&@lgHDSxr>EIW{a0e z5T{v_>^}VPR46OOY7%RzH7q-4_%JxBOV&YuO{Z!i=leY=IxHsufBi_KZ}wxfA^-?fEg9g%!ShD<; zYZQk%^16clT|dv_x%7hw7CSxpyuf5HswO#DS>SqEb@uV_vFZR$JjikcH6soyZkwTZmE7mt{YHcr^JMRd*@p3pRf851q1B0F)JhIei(8kNZRJ(;{%JOAOkgr5%1b39MWR1s&vEZ zKOKl}ewO@pD)-jSYDP;BjC^%7G^c6Ap8st_OJm!NLKR2e2M-2mj%-Bi zsuqh?OJ@AE)hk%Y9k&#SF6@{8DG^x6>j6Y^uEHjYnOS=%JSL&j=1+uQ~~P;hOy%ka1Xa_v+&a` z`6XSopGJ%QJ5}S`TRw5nk4IOv!0|<*zm>e=^u15w(cJo8w;9h8K2DF8xz(`h?ynP* z?w$DMa86;)zCF=R>9pjc>C$z1z~}F(zxB%Ru5uN9_x|UV*w}_EuzJMZZGXI*aGbK1 zZ}gtp4Hy~U!LmQ+oSAIqU<_*a@u0S9SUD~Qb*MtZr4I#q^q3_&xdq-xLR6G2rn~`8 zPqYD6uHVF@rk*phF;nrcxbKK^ZhX^DY~-SQQJbv``)kx)bPgl2{ribNd5!5b;G~JN z8R*yQJVhk5$T_w7i5lBlilg@-wrh69&J2EPw?3gJ2u<5rTFX)!qnx2H|BY;Aym+*h z?piFbCYrv!DE&o>fIsi-HO^=>Opxsv;JR^VirKCTV;VQQuU#Xs>M=u=>f6o=*LmeG zq{J-xy$A_w2y2U0^Dn{n!Zt^~;rL!3_s8G27iK9q24mSA=mgEyJ1TfGE~+LmdV9gq zXUHr#1D+=T5k-1Ak2U(lqIN{)&&sq0G19u~JjC@+v~cYxwwH0k1E4-v@Tm1|zk|O#z#kZF~msu7etA8HWgE?C{@B?jB=5GLBA3Lmr+s zw(zv26zut51F3mOZmNcpD`XE&q^xbnS4LL9dW6sgkK6ipDp)EXFX4ncsutN7C+|kI zNjS4khoD9!XB4CAl~k1dxoP<9WUUwVzI9Eic z#~!$XTt73ZuC7;b5)YfBehTC1>4}FsISaCk)_(wcRZEJXZnHk>rc}z>Q^nWGk(UFS znU*c&I-CIjMwiK{mUlV5U~7}N?jGs6g8WJyJEf3=nJIwjd!}RM5Dm7=vDOnf%kw$Z&crKhrK|XVcc$Fn!_gf3 zh?uO`^m}zH$e4Enah~IhJUYP4|G@5_5!B}iof|h2>wdW+dMo#rPkUNr%Y%9bgMN%t z_*|uyH@qm9OY018KH=FsjxPW}&Ox$mcYdK9S#R-xn7jp*hKA#Y`wka2^jUH4+mN>V zjeUS!)}pJNimNv-flf}&^lw%-_dfoL7_t2U9AmNi#I($63OKQB0aFkXZ16NI-nRL2 zRAH)j)^aZB`eFFk3$w96`lfQwP6xdcZ0j2%p8)0|H$xtZJf$jYmgRy@mO5zi{JUx{ z58AewD@=^n@L1Ib zRYCr|-=K9XvzA(b4_J!W-!*UVrLHk$R*g$QN|^rz8h>86RW^kC6x$aWmX8L~gWs%C zlA9bHtmx153*6?uqz`)a4QGv@PUU)+)_K&}1QUbXG}UO|-JlVE_f#Me+dkKgZ+(9M zY4UO44-4YRj+$+|{doZD70a#Y@-(psDKRttz`yG{T-}uoGDo%B?+BjM5&X8U9dy&? 
zX+`(Z48lcT|IsHOG<8Bmz)5*eH>^7z)UqP!eLP?Qr<9f`Y@I&aq@t}IH&<(dg*CN< z0M}&XQFhi|)rZ@BEe*!!Oe!Fb6Khp{hKHg)BWmP%*$9(5H|IW-2rPzLr2;8pjc zZk4+YflV2|S0M$MdVk!veBSPYUyri&*V)+YK!@V4X$Z>(>zdUBWE8bFVvdw$;7M#U zrWJT#69&*)L?3c0kENXFMsxUKsU40`??w!DXEkVn9DU1LMRan)B#wrTF(m--;hX3W zC<)H*jdPbpNKv#$ZC8%h)jp6C(i>X#H-!jH(^fxe;D8RKVjl4ANnr@i>qX1_&p$HP$%0&hioX)<4Ypv?h7|9_Kz z>Kip2G$k;uL|KBDPhJFX-k1#^Nwf3NP2142a|g#wu2LZ*<9ex6-?YJFu24Wu0@#2M z7`Zbcu_hp0Y>XAPj>mI|&y?o;i?9Ct?7Kud6y1!a`_vk-cn^KO-76)JNaSg?1Vm2u zDLePPJTK(K}ttA_5K)EP~^mQk{81KgChs`c^@@p5h>G1FTliVR{} zu^>9EY$S)3)fsd|o>%ZZ!-LdKF+IYaZVK>0t>m%opg7Ua!HH3hJ%zdlJAJm!tR%ZE z!(Z}Dh>9gHz_ZYRJ^`RlHWMIv`ZH+EleG16ie^#q($m*=VsuOL&;238rwDBCs2bO8 zK>9UgLc0G_Qqf2&4U86(m7@VxW`$4vwrbRzn!1|#|t zFTxL#Bv;%SyQ61zRV<>^s%i}Tx)t}A3@GI;)A_^6+yh2X|5&Q0PQR(07ECGa9h?2W zuy5m0X@#yasmKHk>vhw|wv$EbJ0)BZRW?Qj(r!it38AhLC{^k*pA-05krX00n8MOz z8;{oDSuW^_`eBEaC6H3qEWUDW^s{6_ zTdNq_9s2bGjB@c<{4roHOK^EPkid!)$aKI4HaIHQXEqyBNRIiW0{y z8+OCK+M3OyzFkx)9;VDF#gKzv0OQa; z8q#{N+`qeAn^LJA(VYI%JH2{zG}$$2mnQH2WJw!pjjPAWV@l`ieur8DCr=>i=1w)P zTdJbD)yzPf5^7gUm-=Na1FU&7I^lLB_4^fOt~+zP^-K>3ZkkWLkd z_~WEGZ*jv^K({5!N^&7wBRi3_un=BOtrzoM8B&4BDCXUC7mFz=OF3&s|8rhdsoHcH?HFur(qtTMk zEEAbXe-Syg7XC(_R1=s=e%19oAQ|}J8ZENVx9RPs?Ix*^qeEK#r@1%ULA-v;wwSr$ zMV+Ny3T#{>XyR&?{+AuHZ$*W#3YhPbz~>{CLi~vZL0o$`8sSPf@!Yz$vf825e=xxs z7Z|Ch=LJ#h)1JRq8%4DJi;@xK*0(Jat3go%gc_c$+q!2o%G&2(Wl1ef=0Zow9$At5 zVGr*S2_|4GhmwrD9t zU2DUW8v>{8@UK{YaFtn1z*ihnKO52%GRL0Y2<@P&UW2G9IMTOf7V_;HVHKYaMudN< zo5!sSXKq89r3YT;b*T||9HnPcbtp-O;Eis2=BrzJNfPn~KZg8wVe(6!^Myvl1Wjj+ zgpDKg(?f?sXy*;+C!FsS_}~qC2;prG;;RLZ+kUJi%Z*$90hcf(+`}ihdK1YTh!i}O zLhgfB_1FQ^gyDVEm3+)iHNxmhhO4YQaZ#R7_kxX;zQv(s)cqc(Buj$ww zUdTJMLoJ+6kcyZq`isb334heAJV{6eBN*(*(VlL2N3c8u@nyY|;M~~vIgy4( zKt($L)K?r5B#2Tp%QDi!OyMr71OuX3f9#-Sc8F+GDaP3kH3*Hn4GY|HUVZi(u3N%$ zc7460R51wsE$&^t?PuF$#-b0L+a$PG-&=o$!*<7i*v;W7Z(Q*WaNSgZ2+r>XUBPVq zi0aTT|2lu~wc}=vms(PNd*p=Xx$u|s>4!;QmLs=b(htfuY3AQ@L)+<8o|-tp$-AYi 
zf3~~~^X)d(40zQPk}#|G4i3)GJ=PFUYKesX9e25V!CtE{IV|U4MvDePY62f^h0Ra|_Yn=U^^Z6uSX3i>h#kMJUIK zlVAGFj&i+->5-1guj=ERt}Wp@SXq)Fa2M#`51CP7Kt2JdaaBsE9`QYqh(!XcB*s=) zD9R2Zvg35Zuiz*fVby_OB=A5{} zTtvG>x&mcKeIqUo{l<|o$soYJiLTXF1G_y88aYuga;7RJOerXlBm6?~3Yp~=?@bOE za2}ts7!nl`OkkZo`i(F8I}~CkFhPE^>DwT(Q?p0PYvxv&$Hz<@g>RzuPgXfes7*eg zh56z}wK*)`7ZT!y{cR+6$D@LLP}DZ)(pJskGt%yibehC*L0cpI@syu_zDNb)J2Z@55JmAg!#LP%c!1eN$Hohob;JFjAvVjV9o#=^_>!?N_q zH}#%+vhMD=>gztYOZ;b#$**yPjhF&7A9DI|k*r5_YP|f`^-gC&oVYJ9A!R9+bG%yV zd_*nhIBjMCZ*hLNYN*dgMy|rS9E@F(+Zy`g&XffS-bg~{+hL7u{h`x!)5A2JG$C5+gksMXAizrS(?^V@@2+#Rz8{^Gkq9{B}Ki(+z zleBKmCh5J9yuVe0DqE%RGJL|?WI_OeQbGhkUC6$>$s31IBbw!e0b4uO(r7 z58T7vh~_FpmE2y{>*V%rGX5oKGbiWz_1kB+D3w&9)PowW9F8!A8f7>U`F#$_0h zd)LMkhSvS3GJB8;r-Kx5tST-^BV*5|#5$d<#0|D1qCFm~TLD)t8P{MG{owZ3&dfmf zYWg2$gW*PC zC$qeJ`WCTaeWBg`!`=5kZC7yREN0feF*d0?^l{x-|xo{tN^&jkTke(L4!XN7;l}i3sz%VlF&?OtP+ugiQQB z;Z5CL90#ZN5-F#DC3GXv#}KSl&`BEL7vY4BX;R%Z!|y^8=6)6_$;~N z=UX)R<>XcFLI3*8#c*8h3i4&iwdSzj4M`Kz?|#v&x7ox36GP|sM(%Ko_F{!sV}0{@ ze}6^PPicYrrAMq2=-WI_M75yoA3v_=*uk7|hXp5XrgFY7)Y)HY`+C=vdOttXujYXk zPTZ4~r(9pxuQ&p3#9aH_S8u+lN{DpdRYhDzAUHVTD6j~`wyW4@aleew+b~er2bv>K#NEPNEDvV;(`qL}w|Iz>_ zc~ZY1d(Fzo`DJ!c@klcUg|kJ2*nAT) z^?E9Bz2=+AhXf*1}Sx0JH27BE)%v-8^G=InxSIosN_mHb{v6fDz}ytW1YzU z{XSmJzvg^o6s6WEc7AwO^swNrX^ur^z{Tll4rw)IgJQBlfMK=bb$_|T2jC#X0Bb+{ z5{+#Vdl_?ZR^U-E<(Dw1Iirv9^l>UJb87Yb=XGcLF!VF0H_Bo@?*q$zJYJ4&m8G0# zX@h&yYT&HVZ9pdnk$wB#DlcvD*#vm@`$k02y(hweNeFe0(X%Rn4h=I-1)K}W3cs6w zEUa&ryMI^P7(RJ>2b57m7#-nrF>iSAjk}}0;nDKZICUo1;DuFhfIyNIFFY*)$-ja> zN@kV#X}wzzLts6s+3jbj3;T;Qx9#3n>+47dunjalO{khc&6r~z;@ko+uAWM?vZ`@t zohq>YF_X)SJR9nQF4H8(v%CC<25~zdN#1=znc;*yFY0SrIK$$71Y*;l==$kwP%bV^?G0BX>C+xnRH{?)x7TrAjv3;h+M2KlPQ6DFb% zIT0}kx4HK)L)(kVSoXh}^1ivxVq5XBe$)8-XGVR_e z#n4ry@g%MAs{}L@h(qIFpFEe4cR6IfuAQ|t*pege`(fAEJj#%t4;VF5YcNsUly;K6?Zuv{*w@@s0iG!_RT9! 
zY2IoWD)E;`sZG&WG4fMNj-Q|N27*lCF74KuYa^IvJmIL-u0+>b5qcF?V=2Hrb=H9) z2!%hYnC!1V5Ab6k*!GM*(H5mOPpYK z)1akR>$i-VA2&A(*@jKH?ed5|_7b0VL3`c|gp=oxIwFn2Iz8&dhf}u*cgbHQ&zG|O z5O0gRz5BlAgQfS1%DZ@U5p*Nx;Zy69GD!{;Eg)G{Jhl5HXV3BeigMGn7ITH&h9aLJ_mF$q|-Y5gF@J3#1=|@Fq`aFuXA3!+= znnms^!6{8u<7!i+nuENF4r$i5SFnJ|!LG|8Iwo#SOTe}7C;3C=t^|gAxn~;U3@~YN zj@egoB(iTKvDSBOQ``2MTG~_jZl>Ss|Dvlelxk?i{6EQ9gzH!1Kg-A0AZH}C1r z(9!h$U7PR5m$Jl+jB1P7%|Fc;&lsep828Lc7`x6%S)HsKz5LbRsAFmTQf(gJ#MUIu zStQtTTd~VbBCaJMA*JQ{;G)~?QW;CAq8K@!!_2J~k|M z`?S&WM85KJ*whV>Q~s(kv3a~Cf!BU1v2Mm~-uOjU;EyRfv^SDYn1(ytmvZfYMevA% zY9?BZTO6VV`QBTxpMABI`sq1-eMv%@JiQ%*nLfK@UPM67!$=;Af;ihC>XMSwp}I?;RVEc)vJY=;>_5T&zfmw%7O3%uZxYpPX5EnY5m#O`HkdHEulVJ!P;dt%Dl*w zqXn1wJX#g&n5f%j_*hMTwPQgR!^6Rl!!j*Hd;>gmcKW+!j9&!b!6-k^$y1$3UC(q5 zn7d~^)mR-|$y*gjh>)Ic{z;W(z--)*TUA3}wsPX=Z!ao^#V-R$L#1m^k?ux`b3J zYRa-=Tz3$v>shM?mRe-c77g@v!;1x?_v_Ynh#tK2I~C|5sPLHzU^{?^!}PH+RO-<` z0BSV*Gk~hD6xDPVu0Dm4;CutatDHH-U1jF(#66^)SzGJbGZxb6fpM4RtVW&r2EKp+ zwU76a3byuw4))~lq_w7dh)mvHOP%gK<`%vJ-RJda{7Ub@{PfL=TF<_in8Ihzz}a`7 z8w|qLhp(GHXUNPibMl<*_pS zuDN)KB;?LBHALI_sEPIJqb{5}rX9|pG$~mk`wn6d=*0o%dZ%e;%EzRofolw%+7C3w zN*+-QNRZbB6RWL6NON^Bz+#^|BJE&;IW&X7$;`_FJD%dYW7dLZyON2G!{hXsmxP;E zmuuGCL-(V`9&HC79`!la@>;nLKx8L5q-GIdqTp4tbq7`ZFZY+*oS12@N%|{dL#QqKLV<_2l z>2`LWJCEnz(p7$)-}7w%2~K?m?4Dwjh7BpF++gdV6tnQK;WEv|2P0{P8v13Lu z`y+p*mP7=Oo_WWdinLU>i%5e{An3L5GrI-XSMpR3bJPx;bp9!d|ByOMy4!!Y^O=6q zNNVfJ>h+&P~-~v#=3yIPTj{^!8;x~gSm%r^NufI?VV!z8$;AmD`P4g_cTRa3* zzhpz%RecTk$*|=7IIFcM+t2TIa;o&Q;^6nN$SsiTAyLymymY_4PHP@Ls;BIm_b#lH z9}}@Afft;ubK-au@NZ~b)s61(>f9*wL8sex??xTIM%uQdl=-P?e8WKu`v%?27uDu~ zO@3u+Cp*EhqH>Kv4UFwTsE>5sPSS{Xfwo4c5PC8V&bKVJfzTh&GXHPg)KEwD;mW9K zYGrNlD~g)~@4nbB(?7h5e||F)oQKNo$NJ4b+m(NfOjK|=s5k_&+8er zyI4u4!V*ZGHHo^Su|3UYao#7S$nqgzDM zXNHRK3~K=Iu_|}73_UdCwyj9tFlHpN&tSZ3b5d#9&O;Z1hnGI_aq;%l9IID&5yZxm zu&q*);p@cB(2IX8K~dO-xP7?E?GW5kjFZ_Z%FAG+^QIKTwuKo4JG-zNWYntF9fA8@ zv+x1T)9Wahl>v|Zn4~MngnOoM*WsDXYA2hluPz3}L|9VSI^VDK%>uOI_i8dj8m|^m 
z;-EvlU(+y*|EADLy=tn_9;b(_mUN?D5>k#l`pGb8xtvXsBFC3&jXQ(WQ$FY;Z0?i| zR;Vxvy1i1UE~1xvRp>*CqqY>r;hmUdVTRYG;OkFHpU7!1S-+K&HYo&i^vFZznuohjyRN=7gw)mZl}&ul(SGgdWB7!S#I1@YYcuGQGE{5PQLD7Kuz5e~!8A1U&U(ETu#WkJ~ zoACHp8CR&9{li0SDaDvN0T;>|*4kKJfR*l8Y=K(%p5lQAJ*%7c`<&PfrUymG45H^Y z>iM(k$1M|0G9t@$-{W-4^GwpwT3E zcMD?VXO6=CNFJXXwIXJW?mL0Zd4Aw7M6Ywfzgnl9GOviRG_4zMcRwr_L!mb|BDO)p z2^MvQh!N;O1)*IbkJ#p`1ss%D5nZUx-k3vZPB7N(^a)d4na_W3G|-mkC&F(smHyf| zc15kDcDk>;4jRV{YCP_@(Ede}K|+4mI!Do;wJY$nhEg1P@oO`~ zTI6~)WN0%f)8Cqi+eEvr`KKeA!E;6mKhdNZYwfD?|Q{{0cs!>i_XgJOCV6sa=AUQho;ErCX^ zcGUTJ+Fyal$oLKAE5g<@C4_O|B{VJQ0CNeWS5+xh@1&2Es%`p$6HTZS7o>hx7}c(> zo?p!<+F8{SabVZA;}F^t*K?vI3zw`{olR>;@4rYs=YHd`;+ua^GH^qCSjLi*r_MTP z%&n18av^D}-eV2&Swt8d?E>hRWUS!}dhnTH-o@P_@5ApKvNA4lTwG`;1eQyn_Kw=h zgr>T}nSWTlOV+ktmP_R9FbS0Puchhs3C7Rpb*4DPXx0*QGLJ@tHD}YgbfjIAhho|4 zex-i`(gqB`l5r?I%5A1KKlMLB{d(R8WIUa^7_^!i(1!4T3WPEkhSId{jT_M2G5tZ- zWusa0oq&!)akDdyFR^StD;N33UA6`WI`vrt^}*wRH%u3Uf}{sjyPzCZ>re8%t{xk8 z%krfkm;5`OO&p8>&8U@@%W;ruuvJD|T=!O3B#RI_7RKQb~3;Q?FB z(qwxCe91w|%|E2np!T<;A*FD-<=1ahstre!N?KHV%C6vzEkZL&^>YIzFn>*+P@x#` zY1`pR{xE7+NBw#Hp?LXUnQa3PN24k={!%r*l5Ypad%Ag#ovA&Y z+z{(FlfP1j0qilKczKOhGPseJ6RHGp5Tj@$Jx6r*#`{g~G=i#&QeUxIKJcqh9UO7; zssi?TH}Aww$y*)~vOp)lu2VxhTX>_Vs~K{g=AgG6$gr+@qB-&^OPKqLa0*3+Dlxc< z2Tz{3D*ZUjHzKmmNJzC3W}A4IZ|aVQAKAT#T?8+SWWVQinCJfm*7$d}E+4w6?09p5 zFh;38_kxO8o7k?wvTQZYAXEij{&7~Ghh<0D_ugRpNi;5
atBV`KQ
zy#$-za=qoRt*&&dd14;2h*-*dw};hRw0}8Wr}c8&8ks+6f>}LuMpVU65ZP(FXA;b6
zd}?Ne(?l8ZstPe0!g6WhjhsPDcKNFHkRH-Rgm>gBGtaQHN!vXdyY4k@kB76|j9fsa9*|#0I79ccqQHO0dW&K9_
z`>(X!p=4FQSvx5_c=kmLdd;VY(^6fIZ#&1KQ0$@Tf*0RdjtG9*Zz444|l?
zdvaS#p_da6V|Lz2o(Y#@yP0`+g>uaF!JN7n#m@pO(yAel#~Hmi5CM9ZsUoQ}30qBm
z#QwdoQ@_#WvN3mn2;4*i?&%4TUm0R~k{t0&UQiRbj$|+`v-HtAc1Jt2;siQ(vyp{gg_~FqL6Iu$3gZ9Cz(r;y{z4!
z^Pq)`5bRdt@2I^iq3fx`cCJs;VCC{}AoU%zRcFGg?&tdDqo;EPR>)4qT1uNkSpf+@
zY`O$V?Lne4d)|ba1LZ$&ZN8`e?2rWwlaZxKpfWX1yKPb%!zF$ve84qnJ-Mjr~S$vbg2%J>tf{o1vPRR&aBe~r)CPmT}BNmrRqp8efEXSH*)JQFD@
z8b#V8q$^vzf1DgiR#_8S!#{Q7%fbg@}{E1rEV1SB%Htyo>pGNklwns$t#F2i3#qoC7
zN-Vha^vmH$|45q~rGuVEl(G;N*^x(2?4{bl$grA*h(`!P
z^UMwI+mjz-&efAbUhl662_f{%jvg*dKmS~p&0%qrI)yRqypyx;Kb9#+(@27H4VLNU
zEJ+8P_RM>A^9F{5
zULxE|3+JCr;;ZHb^Sw!d46i+W+wH>CmsXhGbiy|o94I#w`2>O-A+;O&+h7eIU>tp1
z4$SSn`UV(P!x7GLr&^l9Ne6BS4BRs$p>;4@?OX0`=GRvLQX{Q?LK9h@FkOgd>@_bM
z*T*+TotKQpR))OCj>!YJhd^~KyMh3N5s10=A4b4T%SU8`!^()caUu8A5X0QEaN^6x
z*eDigj(DQ)B8EL$kT+oF1ifyYbw;p#6CYGZ>-pyB>Sd^qf4RF%&Bti?4i>Ywdsi0^
zBGFLW+Ysh^99+nNtKqFIlNYZ0jPySm@DLAQieZ(h=&@YkkGWLlMiLl_DJk&z8q{HD
zW^D6vGS2vg<&%*S@1xtQ;`(xSdky<`rpATKJEl^bJJP?BZ0p`ds8>T9nv>p3wXEZv
zUx0uYKy21PkwJxi39B3?nJ!#CH*is$aO##%3Am9soS|Z8yaB(wjhl=L?hTSzbj}vL@S6DU1It+}v`oJ^j`Dv=HKcqMe9Tag9j+^aQ^qys{>B>5-t>1y3v*zO_U=
zLQP1>&ez3Cc(8`X&P
zC{g}sYZCjeTr>Nz!{tQQDqqJji|zNx1l1?Wgq4Ut>#DTrP&w;zLM)KzkvWusIaZmZ(gNe7K))e9F_>lXH__$x{?=S-1p
zQCHqsPYF+?%IgP}CoRukwR%Me1TxHym3_yIXMq^x+qq}WHMi627I;9+kg8l{Fo&)o
zAp7R@(5~=lp2R3YY>2QkZk{c{^Qf}jHIYY9Yfx2D91@s#PXQxuk0mtKRMsxEUH$Mf
zQ4`9W^QWH{edR;bZPWXKlLrNIq3C{C=9>HN=%q%f<&0IeA=pU(<2=Jdm%cB7mZK8~
zlwL7Xwf)DGl;$vKv2r^j2M1Zb_GoS6wfZ7Bc<*mCXEa}$N^vmkp2+%Q6<5o1!TA%N
zWp0fyLeCJL#&HK^yOQBeV#f4Wyb7!B=#QGLF>n}vDk1CMdEhNK?rSYV!p%2_h1!rg
zRKu+09KhS8tH6z;$bkSM63`hqJ6>?o1dfXUAE35irF%Zo74RiW=FyS=E&YppuGq<^
znDxm5yj0>GYM|L$Tu*oVp;W_J>BMj=?llqwQqQ9cY&-#^P<%Wgw3Zsi0j|Zz)`;|u
zy`!EXJj4wli7u-6yY8V5`q6UnL31(R*@IXF5NzpTY?u%^^bVh9X6;Qt(7-weZX5V+
z8TaW)5dZ_bxk#QdE#Er9r@Am%KmUB=T{7*!UzxgMrT5QgYP55B$tMrohWD|nn)Us$
zt2zW&_Rp@m_{pil9rapU5BcvwDQ;AOeHL3#SO9)OFzQEi*BlmD`{NK07UL>AN8V*P)O5SZ`ZCSrLPMKig^wy*To)207?y
z@f&J2>rtdHrNyy?$VN0Aj;q>gs2y@o03ii4{u&Iw7K_dwqH@EZ1@G@W6HpPO)uVnq
z{Q#GsYic8Tf1u>TEvy>M()m+^}7c5CR*Qq{sAoOr3)6r
z*q_vv;yr!iFx_&L`8iC;0Z(BJbke~zN>hfE8vej
zp!&?B9evzrz2xr;qMq$n#R7}pIyCR}3RjB%qx($S6ejj2V!H)x)sG&jV3rkt{Atim
zt8~D@eC=com2K_><9D+XpnW5l{#sKQ?R&^vy@%kKZf6%*1ji6Nijt*F8hA%)^|yc<
z-@JQm>XrDWfYiQ&hvYlxicJ)h5Z`_aAxWp|}C^BXnQubnKi9911)n`6{?Av5}Y_^a1EfF^)>hzdK?h~K9q=@Z{$A8hIrX=+V(U!ov
z5Q(yF)LJFngW6K-G1bsB`z2O3`szo(DdX!PR=Z&0nLLB)>uiEtZ47&yC!f$MjdGJp
zN6bvW&&8YK$DZlYhl)gj#tZei6L+5DKWTfhx84$K3@hL_=l(TiWH3f`(9Asdp4>uQ
z-t~sfW7P*G*X}aTirus-j6yZJH6MvuLr5KYcj8zFE+7EhkPt@
zSUyoaUoi;h;ocbC#~F35XXL0=Pw>`}2n=uM{x8d)tRl%mo^2XmGQ}8XuaG_OsJ^1R
zC3x$K7a>nTEjwZoN(MS)+G4|Im@Mdqnw`GS?pf-4C?1OB7*X}iYgLT2k>WLbsE2MB
z-Lr2v;c~j-xl730a3{~TGCk8^exX_nHA8OX`#Sr0zNkL4RG!&x8yw*%txi=9{=P*5
zTTka}>5XC8Y4}W;0*-^y8*VH3l2zfF^%3Ot9$rkq9j3)g{Fr57{5#TXUg}~>vsL

VNJJHp<3zCB`)aB|(%G`n*nb+}cHWoH$J{Yd!;H7_Tzf%FUy}sax?F>N7iJXe-=Ne8{$%OX*`0CiAe>2 zzgM3lWszo}rg61FM=8C&3;!lP^C(fH09TeKt;RN)5>J}>69rB)oZhIobfgkcC4T%?{TWbX|xPz^utwJ+X z_m?lc2=6Sccz*ff&p@Su&VCdj#j%fwC+2}76+`r2Jm9!7l7i;pS>YH++6n*)n*C`I zHjtHgX+bGPV)|;d6B^cUzJ|bKL3&!vcH2TmnQ2FfmF1e@mNmD$@)#W0xFyj=W_J86 zd2IDOcN_Ws^&6$_Qg7_+Qi(!egJ@5!Wqmsj4o!%iwvyW!{vvjt6Pc_Vk{J$F21hi4 zKP_hri(sneU{r>lHC~=#DvnPmtn^ML@%YixcgkLSG>TucG(fz0<*mmO93Jf-KoD8u zQyYb2Q`G93hSEnnkYg7By7j6(w$F@}9U4>9f|8;Cq@{hoQHcsZozfu65Iwl|?Y6l; zj*2(vRE<-~EOqkg86kL+P=>}`&H3cXcB^UQ?P|)})L9mbcH@sY9#8>(^Uxn$UB_ur zWpnfX0NVw}f$*!~BgwI)m?7B%LN8js3A?gZFAJU{eakT4n=hmyb(De?4l1xWAkzdv zscp3H0bLsiAk_<;zFx`0g$r zqs+I>!cx)+<$^<4si~Vm5Q~pU@#!Kv4rvK$osmOkK56Pb2kP#+`@VvjrOpaMi&U&R z$nawcq*xF?0n5uS8tF5J?YpgDvpD6qG#Q<(NS$z#5BhCBI+K zeRHPjTfKctwXPqdFabjM2}{LN(x!MAbHh_!zh5DmYqtvgjWLgb*N7s5DXle*jlBaY zK-_1Om1(cN$OxN~Ij&$%$^r9#2>bU5M3K^8n+#PuXABrpV2LLpA>{V+lfRLeQM@jwWy~JctFvqO zu=Jepv6Ky!XU{%Mf@q-K=hXhnjFGjTqvu16Qi7lAIT+TTb7{d*Vz zkjtHR8lYs_0ND)oIJO}CkL$>TjQ2do=U_>$Q_tcjo>VUIh?J7QiTp!Z>gwGtOB45; z8xdw-7RoC1Lfls@9Vs2xzG)h+>X%xgTD@OC5UVYv)~KsG`qv{jAWkxSf^c{O7q{fd z=6Rb>FKtS%{})OxflGW87Q2g4cx-yRBGEnp&Qn;{8F6b~R~6Ipf5?09e>VR%Za6yh zYm}CvRilGavs$%9m$s-au_>x%XzYs67F86j+1hF+B38sGL8;m^cB-hoXAnuA)AzdW z=X(Bx`}KPA13!p!eCF{vj`#4@UuraRutc7%z9`Au>nV=%tR?Z3-*d0Ll6n1a9D&)7 z*j;s&RP|1vr498ERw%g_a7#*}TkLkVzu0Ah!n}VaG4T9OeT-dru91%P7p?VbUKj40 zQjY+f%MH^9F`UrKg8-*h| zw(;`rM|c~Aax4L)bcqY7gg$oV!7wC_sb|o3Fh{h;ljVEt0=(&a*JqsN*9BAS=8*pJ zK)qKIQv#z=E<;{jVf<&%c79J;*Gt=eV`PZ^K}n-0Qv(f@0J;jE>~tM8TOHt-TzsdoY+5`WfN{`-RaZl)$sqsXBd9 zL#Gg)HT$b?OUtW0;jKa$Gz z@Xs3LYN5OO)a{kOJy#;xzO-VMPMH5G#Wj&FwR%|epe7Lwie{n9ai^-pgePr>*F8V2 z*)OwG?6Xd`Y>$>{n7xAh!p!K>uk&hGNO^mf2QNy@RvbBk0KlWy z$_p#3__(oQ@R3I?$F>=aANtq@xBJ;vgx4$_D9}k%er&%CGMmk)Yt1K(mLw^? 
z0doYCG%yWI)>=GQDs6$GwO%dWj~LwbDy+)+%KRtCg5Dx@*1{U{D%ZKrW9M1v{5LiE z8xZ(C(*@Y z7sr(2Hct1l9M>F~oOv)$Ce%!bjrqEQUF(7t@HsJHq{@+E~S& ziEif2gA&ZqXlCApCvZM}t%%9E?_7xcPc&i&Yb6RoOYP!N!iKHPq?t&IzX>jq%9i_e z3q@+f)bFdoU)LC6ef=}2x<1O^suNlXE-flB$xkon-4y$BgtP+6|c{@9NaVx-@ss{?#6T>}KS zYDHK{f%c9_1|M4wP#R2K6I_YQGoz|hIqipJj2M$l<%gK=??zdpKGqm)g>c*rY)}GE zy0nV~?^f@L>l+a1)yg^<7W!?*XIk5z3wMvD?f1FppBOo_|F}q->5n1Y{YP^|o6uYu zj*}UnxAkN`BBpDUA`)?^1c#!WwVZJIUNQCc^0S%SxUv@f-aDNmuKZthnaeYM7Ae$i z;mf^sG)C^5mIiJr)|K3-^(mO!7;_@D`fL~+@!9;1*E{H1RJvNRqjcq?27_Eop*TW9 zBONGCkZKx?i4A7Yw|DHUOFo^d&)wPtnAi3#;qm+0&>Lssn#C3s>}~+;UX=9vko6i| z$}<0HhRh`3MApk=Te|?TpyxC=%6U}^)*Uu zJ2yJm@-_;R<$pBDjv})8xsIQKp;?Cr;xt6vk1-)eS!B~u479|D(bOUqDskb`EwnKx zr4F{>C!EB(zx{1P?!9Arz%G_v$Th}|G2d_g(OlZL#C=llgHDmpgP^UM4kz5gph8U& zX9I>i#s$AB$o%|$Yz~kKUEf@v`K-#$_Fm`}-b&<2Ii~CZ$A}0Hse-O4(m{$pV;DzsB=10$$68As42T>wH%x+jmwB;usa6j(yMLYqnO$W;4M=|ToUOdGxTrc6$ENe$>rS+Igb%9WQBI{i zN?KG`IfF}rX?tQ>ZTR&%kN4&6(lkjKrT{<-pr!`VN)~)Azvf`!_L#58xDY*chQ2HP zplAsBXDGVC4bL0kr4_R|l91G}8Ip>uwQb!V$4y+CAWQ)ghkV6kSJ+D`{fY z%J=CB#9VUn3~I3>R)z+YZx36=bK6THkj2eoqiBViz~vs?#l;vTN~I)%PV!_%u0oRee`X zdU#BxqqB*S<9a%BJA;TUIrnxZLf|%r&L2c(7VrP5y*S~o?nWuP7E<@3Jjl+jar+?G zAS@H8VAeqr8l`+(x=;v&4hjq6SJ=Qc@ zJbE(h`)<7%9XWZiLBj0*LdMVXumpFEk!JVf)N;g8h}Ykp{3kevVk&JvNYnf#cGMNf z&?RClYpYi=@wIOmOK1vU&Ku`7lt$^t(GeuNd|`OU51?C|1h83_v?L(o)NU5;M#egQ zix<-UC|D(T>f{=hy(*G3_zUg3vzGUY_Z^0=GA>tjTYI%j<(I)h-4R~S(f2Zg)&q;E zR*@4y3OXT?zXwbk9akMJUR+ZZcsOs-1IW?{<1I_kTL|_16?fIU)ne2?1G1TnnQBg( zsl{h<%{J|lO!)mCE~C@(b_p{pssS3Fl4dR~s$jR-X*#uxB9^FY{c&G|L#rDnNo1P0tehQ(trnD$^YnbQFr*Y0U_`Sj181O{@QF#|;Sz^3R%c zO@+M!S+D}ppp^6BF!8v=W(E29zV1aAtgrp{Svgry-y0$5nO2`Z-po$k)GMTU--Sv` zaG$^E+BUjzf1pRV`|D9uJfF_^-CYgKa-U%-1Be-c+Nm)EpV_AEBmDR#U! 
z9ypB`oEO)_#D82muvMFXS#Ty-^Um7_Xa$7`mpC-3be{DtPMdBWLYJLBL7Lz#g6QB1 zYn+E%ex2TDIm>u*!4J+i75i5}Tn`E}95QiwWe6uvkb^RQhiS^Wz_s@Gsqe&F{Tovw zhs?ZUViJ61KAXfF*xIm?_E5^v;w8^z&gYV~KEp$6e}b9e99`!li#WU!9k14tR1ZIU zD|K;ItR@j;RC1i3!^k*5%W6C($jflJi@FxkS{v`Wyeo;j)}qIy1xZ++G@`ejvnIAW zukxV+kgRe>2Zvu=S3i!Qv#kI_wE)$P++$XgXy7Xu8326dgCW`=1VX}c#eyos?|!B^Bb`Yb6V&{~|>B)}C!mR=?{YM#s34WACobpCaWUscdb@5|S; zQmv=skFi|%g1!6M=JZcG3P*qNfC`Vg@LNt#>{7M0*vaMlF(-@@=0u{@p?w9Kw@&5q zqE5L;qD(~pGb%-dj+lMH)Zak;J$Gt)iA6+H$eN)d;~h!nqM4bgg>tiB5A|_lTh`%- z5WYe9qs@`Y;~8Ehh=s-4{DGacS|DV0OBBpp!@j3@b7?WhGOeb|llDSlaiyekGexy}I!0Kxf&jo> z;Ohe(`WnyG8gDKuuJw#BZ=p3KnR6Q)(J-+IpLWt#KMuBAHce z9qudhFlHmVgS1Yz2%a+`6qrVe1zN6>PBVaNG?6Nwg_I+no;&MyxWv}6R;_M)MxbX* zMP91L!;~l5WGbS%tJ)ExPTn_-JfEzx7OA;-Vyye=_g5Bg)tw+br~teyJqE0QtQK-Z zuSi{n2U#HWLLNewbAEP8TrQu2-unj%*#MA7mB)CuU+sUo;L9vpkBhW!PNBazh<@CZ z(Gx}nA$HV4MjVd;oHf6POd<6)%XLd`Ezq-|#`s>WK)^v^eF+g z4RZuggm*SXGquUk-Twn4b2*{yVT+y)9Anp1%Lub3j0VN8Y|StD-QiSqn(oI5!7l~p zoTV&;GED;VYMjTqI&Yx++@oOANIoD$Gw64n2T0*~qJMkh;>#eCg0*MuF1+upnDCJ; z%Ys$y^XaZeO;t&y zxgh?UqLD6ZYFZjd=U-tjgBSL(YX>vnKcp_NjI$oCx@5m7$`N@u7M>A%hW5C6AVXPN z)^laBU1sM2&?_y4ifGi>0xs#yu{x5Ay20%Cy+=WNBJyU1iF{kzy~+U|F7 zKLl8>lcv(Y_QFZUF=yl;QH!^^qQ98C_xjWe(-61N+z!78>$)H?0n2_7(^z6Mu2I_^%AiJFDeVAEfCj z9Wv;vpA=I4%bWqSI)?fD7l6gloF)~ZW}}W1cNw^hLkzvh`WpOF_-zJQ!3iLXZ~Fiu zl6k73XTsj&MdtSS(u&r~g_*@IEN`!!$+ih~5g&V-w)6=pxmy>15m==Y>knE)!N0tdED>D?pI3Wzti`xP@wzfB9sM3_SDAj>!-QepEIuaP!Yy z(&S5}&EeGdGf9Ihkfae+%tbN@nAJ8D#+pqR{i`n7Ekr%ECe4+F{&PrCJ`F7A3X&Ed zB0u5sDIPpOtlP-~&+r!wIZU>DqaNa6V0WBqj&1l9P(D48ZGqi?arK-u;!2rd9LeQOjAC5EOqZoo>DZ$Q@rQow6ogD zFYZ>rq5bkJlPTHz52j~iam^wDF?ypcnY+h@)=1a8Rma>fqc&R2W3b_gC_ zgnYVPaB`cj!y?R@L2ZE$E&PnGVyAKy=Nunm*+J?)wNe^ z8W_LIYgI1>>7H%&8k z?CDk@*yDeNr`jr!iC8wBqUkf=$op@XJ}?@=^|;W%!uKQ;s=m3NB6nU{^iaFG)pfl6O`^`?knrMzd)|^xEhc(o#?7~LENj(=0@Kjb zHS#>T0)rrTJ}rvO&Ik5EHzyzQpA`uO{GC(-Q}U0ouVd-oPBfLiJABp1w6kz5e23cD z9&Ye(BrLV|-c@^SI#|8XdQ&{MpA)Qc$pDSveJK52O)cPBn-ARLZD?O$dBp63|766e 
zl7-}A7mBA^^;NIedQFD>@tSwLF1^2NpAv1oJpbzC?w1dNBFocx&F;BfF&3ypDgEXG7Dm!UjS zM0eZvnn}V1-jCuS%)On(IB~`!&`#6I=_hpmF5ai^6FY@4+E`)UivD+#-9&_gvK`m_ z(JBrA4LOzA+ZLtJcAWaI}h zzb3SJ5S`_pt}#JkZ>qhWJQw^Pqa@Pwdlvll6Hbg&Kb1HYK{t)Ld%fXz)t?8Er+4mb zG^R78xq~$VTW!q=S=)Q@4NPM( z+KVp*s$4k!lNwXuCUIFgDHYG6ob+_4?nT&d;}##{Ymx7XBc~s;3?-MoDfq2fktlTm zVsPJXtwf=_It>`?j^&m7P8ZCvhwFvH7-Mg&amwI)dX3G?%?zi_Nm!D=!&bLP-BC(v zU-~YZMFyB9NSLt#N3y35unFiWr{?X-X;sd}`hwHN5j_h6-vDi-=ZEd4p4mUYJpG=( z_fAVV@H)wQx3{OD0`+$uB9}-n6Fl}O@6{foK>Nzx4*KnVAxfFSoR03f-U}|B6r}6g zV4n+zU@2jAq@Ps1WrbOlu(E8Xn=j%DE@bGs&dS|3B^ssUhiAG~{t4#Q)QLedQcKA_ zv&ea%mil+yDdV5OnccgyUmRJ)-vj9Q)(B@|F{+S|T6n6vEkqXMmROB){3fJo?0d7C zgvvp6mmb5C^i1P)>d!QZxeC4#&@jH%%roELQwACKjR`c`{~a8a-ZOSVL~E2Tj|RE5 zfZxfKf&>4UAvy>#OOdLso1D{rIv4mYhF@p4aE$`YH}=PT+l?JaE$wbOXwhA%s(nrF z=RIbGD4>M%`h%(|!;Y9Gfh$0DVV$L>br_viRDVyD&|9oxxH(wwEF!7WE?JYDzU)$_ z!=-GsWtVzQ`E5w3=9xej`uKJEP|c2s6W^$jR_9)xB|Ym**mXH@yK&G0*IHF5KL;m_ zL2*_I3XcKZ9opvf&)#acM`nKp+L$f^ef%nU-u-*-^yHm3)gCnjtZKL-c5Vh<8W5j> z#jL%#s8cQ}3m^^EhevHm`rgpw_4N1_*xur2$!6p%K;qeQ392sMfaRIZ0!XW?Q)pxW zmt87kb{UK7u6BmbDtBMA1Cc_2;IH_$L#?Ajc3=b|OfVBN=gb2@x`&M(ysP@U)mW%A ze*H1lerD{>Q1}w1{;T*4EN-?x5FN7~Bz{U1LfnNGuvYiKgcvy-bW}O-`v4fh=`#L= z!BX$d0Enyspyj9~4oj-9Ob{;-*7?4>zT^-jD+T&xks$fBw|&{CE^Mi_q!Sb?g+93fx~TtE5EilDn{wOUbGqs5fe7^r@@Ts`*JKx&K zRmzZttyYexkW_iZl1@QRqCaK6x$cjM@W3`Q?L^|H1+10XwNJ)HB`@usM&Ri|*~S!c zhSwVN7-w3AU&op8@%ydbHyh)j)&yH+IY`!D@@!d!0W8TL;i)l^fiBffu}zUFd0iTT z^KVjQ97!8rq_6e8JTtn^N`3b(P5vYRSdYIH-zt|-eO=)X4J@l13*qC+i;E}-7V{p|&g zcB+Q%P_o&bzh+n+epCD46th~<3V5kmAS-_mL7Y>D&-l6iholQGis6&rN3?}%MWPZV zE@7kxF$dLdH2mpvnK0q)mt4_ujxHMCjg*-oMhN8u(nL--N&DQ3*#=Mw+$%gLdLXoW{*d z`s1oMWLZ4-vC=@W93GKAA<C0N^bv)*|Awpyqg}&dVTX4x`&QGcn(+lg50qEW^O0j%Q*+Iw ztM(nbM_&r67@xUB%H}>)vTIN^zi{`t##ZystZ_McJG%dA3~=sjg5D+UcX&(c8p$m+ z%j_5&pJF3Dd*_c*(fs`7{`%z{gx6=uIbTC-4xq+|3V2a6pv8Xk==TwAec-?b6EAhn ziW$b^3n<_nizV37D!aoaVKNFJF9v{N><^WDGhpZL*ymFnB_w<~ir5H`iLs|%CKN`y zU*t`PdfdN{u^Of4h$=mHrq_OI5R}qE;D^Y)${vfVI@{tUO%-WgvFL<9OsRWil*LA_>? 
zrd|oUm5YZMB(u3weX%fnIrOEGVQYxz3~d)sGe;yUNrg{C?@AoGNCC^VLg)RI{146U zzXMpjuan47Ga)GTMw8 zTu%F_;lm9<@{HejJXz|O>g7c!5+lvgtiSk-s`GzGC{3IY(7!%vKXv{Lt_r{1nh zzp~Ic`5Zwbsk;qjJzup&3v%y)=JIAZ_mJDc8*VQ22`3|J%4!zApdE4=<5i6En9Ywt zHXe>PI$+IxcHQHJq;?WxD7-##D~GvKj6NIp+=2DQrM$Kk%qpqE^?VYD_3wK6;Y80EkI_ zY-gutO7`u(RwSDM(xtRFmN!y46Rilp*PH4-@?5W!GZfcHt`K~2;{mxxx$OlEAm-9k zNuCOxlAn^iA(db!!U8oIwZ za7Ki3ZS!*7W``_vYUW8~F%9A>EujKPMuQmNynJem%j06MrSD!rH(nN9Aon-!PT8qh znnG(yf%!u{9fcNW+sJ0g3f+LJFX6S{c-QC?H(f*S>jlCP?%su;%Yh?)KB zqmEny4?Xdfv!Gt1#c$|b_v&-pjzcfG&Gvp&39ngOV;SY}2!N_GVt{(PO+--@=ERwS z$4x!rtFSJf?K;Pkx-bLyVwkIikB6l>P=NO22IfGo5bjQ;a;1T4?+k2-8@ z)TSa`j+GmuU>y|9*(6lH-3cThCqABhuhD7V!X%A^Nw@-xRLUU?5ExyROX7Wg2Gl7j zWwA|eTQa~~e3(fLbP+^pN|W-Jhz%MZjvI;X-7QD_@Xulxjr~g<6bLBxNdYsd*n-~J zAE1+liR-mer84G>zC%$h@Q#5|=tXKKobIm`E5aBiR?D;P+pLbNu!0@0Eq+tTnIe>; zq{g4nHhfyp;Uu}0y56$ss*_d)KM9gJMprssgWwN^+8-X8XvB}~RauAro$jPCluoqaDHGk4r$>%sxwU^v$G@Y?8Qz%aMiQX0DZWLCPe!Xl_O0|2%ow zqTW}<@>XZUjkgYX9=5jxrXR5d?`~u~b%$=E-P62P?RE5lnju58si!G*&y0iX{N5XrnrmkUWkG_4G(wd8K^u z%k{Z-A>X0ZPW;Q|-RF7T2+6V%J;-e3IY-c&S$%b5{YYnjRKZ_)qnOlE}V^Pz#hQ;@_Q635_+{}R_i7gXguck$xFERt3N=Q2S9%B z-hJM<25MQ4GuY{G+|#h03>h;uX?>)7&bzpzI{#vnDii?UgMI|6IsJAz-HtyI006LR z<>0*aF`nQn17|e;_-|b)<sW{VY=iEmOlO<}(GLFys&0W6U)v|~nbC2en!qyX=k3jj$x^WFg{Vb7FkI~WytL|rDyU-tS|XHxP_!I_YmFK^PEq7)lC-!kcoKi4;WBwU8sjDQ!1q5 z=wL)|+_+BO&{fw=mv+Og!vlo^Wjc~>`RL!4^J;Kx{FZEW882(VHTbNlQ6wmZ_*mJp zs4_PtHo+g6M&ng1Zs;G3YZz@B0MOyp*~RlU+&vP_P6R(pIEA6dwVFusB6Ut~e@SAv zw|%@Yz6*U5wI{wEyaN}!P)o0UsA{Us!{KJ`**59vmSc!sw4!u)^Ye>-OhC&VjHCYA zQ!Q|^Ur;&L_%t;0i^4-kXm7=zzpt~$E{*dCUnG5h5_f{N^#(gEoM}~1t^YCv%6w}< zsfhelUVDq@p8UdJs=D_&X?5KfV`VhJGL!gt(`0TOEMI%_nH|wGzeihu%)}P;P>CN{ zE_v&Gqw8KfDi8{ug#YoW5Au>^uFg^aS^c%O{Ldk<&f3Ni@L?PZ^ zU;85d%Xn`T@CI-`&gfi0fNVhgSlQ#*CL%slogn#f=^{M(bDFv3TVT+n&B1VHN?{Q) zoM~uG{d$WF++r|CG1&WyQ&0wkLt!mz0;A4Y+N9L{`6=zhy{pG>5*d_3&pS&~NvuOWPl*;FVw&sjDI+2rw=Cwdr|EYuz>ecT2j7 z3hSc&GU}<)e=BX0dvOXH6II@sUlkE#u1ka1s9k0k04J(<;dpnmMdfvf7+lC%s(kdy 
z?zXT_bOg#k)Ig+y^&`XJt->fSL0CE{mxUWOPIal)op(+Ra4{IY$pAV1Y}dcypQ}bx zMligXd57)kN+{@$Da5drS{hpRTONo$OaXw6ZQJP=wtD4;!7Ed|lhfg{JEslt)&%Z(e)IBW`|>%%8n)&2$nZ%8!Qm#KAl=da#Ypv;fb!B`nFC7f zk2dR#W01|&mw*sSw9}puA6e!#hfM)HcdwX*$i5KVVT zL)IjyPo&TqU__{8ebMPBCR2)7oHC$ljh8pxekyxcIJw;#2$TVfAYOcs&GRQ^G;=7! zUe+SovQ(GR`JMqL`jlHxvD-k70p|ubfErJtG$3G#y53S?`ii&DD>}}!wqRw%)PY~Q zE%xKMH|wqyS#7(_%xiIq|U^C1Ku?`Flmr|THt^zT&g3XX7g zfJuj2gGkR3o&r?Hi^Tn@sNvxe0IHO4KTNt!&5#i7Hb7 z0>Oq2EUuq?BQT>V&FPEuzTev*f>+TBZf&Qvkk?MDW~jN^H2*g_fX`|2pnUI~=N*tJ z_i>z%y(kN>!OEPMzz0$6YN_3vK9mfk>#;`0>NkzgvGvl2nfLjMhDDxOra!wWT)Y%h z1f5&oi)(}>KYL#}1U_$JWb0P{s74GL;kJu6ii&d=rfxA1-2*8h$R8P4^^|3c&J%eE zrQH$rgEtl1;aGymA81QtY`R;upQ0g+_Gsi9`F6)GEoSL3eD2>*)waRe5FP7`ekqH%VB!6dn zeQX+;?a`^&Nk=2%W6x^7np@XTla2nWGw$ntac8D3V9UZY7C_ToNPJXvMJDt0W9@`q z;=fX!2N^cTB4WUHvDN%K3@jB?c-zcOA5agupS`q}+3IqZVrq?>`;Kw@kdeBz9_BA` zU3z768CCpJcPzuzwisb%pw-?@uwe{~sC8)8;i6@HtrU(Z6}EJl4%nU=c4s52TDo|3 zIqg0mxup|M-orixwnv$MrlP@#^ZPCG2ufIqLrKol8`W&nT|l9DDk+%xd9tt^pOP6Zy}!V` zJIRl|lDleTv*x0u`%$UNOUZ9TAw0Fi6;rz*%Lf+Z>6cOseRmva!g&=q0u>0QkQ z34PN_?v3FbN;LNb@xzRgJRrt)8<(G$#5a@tXwe?As0S#M7&>duL;z(5*ZYg(;IhaQMMxX$i`Yh%O74z2Y?d!XHd)CI`QNZE831j*4G zup&nGhP-X$b>R=ibc?a2ryAE(rC*%GzJ2{HT@)^x%l#C5D?#_7dY1N=QS{w%WSzx* z`C8ub%2{zzM1jQ6yx;MU7`0>mlv=LNsi zx1Pxprzs@PAnM?EDX}@|G2Q(?kF!`Od07_R`7`{$#f_$fE{!m=T^i=)0S)mG7ifqa${Jy`bPOF9 zd&U&8(5oq{<`W)S(vKiko3?6ikqeHhq@!)PW%G1aLdPxz@hKk(csH6zFB@HmF{YVV zI=k|%&um9Wc?TZY8F~h~PRxWe07oKO`AM_F0H_wAA-Vqf2&)_&Z*w zDoS;)L2+%3!-B>-;acru$8^rBxz5p-UGe}ILt8U2Fj0uS?-Ei;K;g%F@ZCd5cRT1T zA6EWA<{_;EK6FyyGRP5{46`D;w*sf>z!&kHMjNY3U6OK6gU-c&GVu! 
z&cP6aS1#x1?khLj2;REr?)?p8$<+|`0QaFht$QWk@Bmz7r*) zWhPPT-j{{2KM?Mc0+TLAO<}DVJKN<66$tOHoJ}{F4I#6%oy^W>kqE}$(XStL8MYnl zi8)>T=gb=`ZWMu|w$-c7at^8R@l2W^UdRRzb5|9`2yK%K6oy>3(hK0vR@b`#!qu205Vsu3W{3jrqFceVv_p$Xp$f~mu}hO`a`8Xq0SeWznVfh7V8n?u z8$qq{s`>5sLgSD5l@dazgo#%}a`oqSm$mPEmRD^w%c{9NlwbnAx_TVfgti0j276x< z9=`jvlJ|*k>pnUsKAwl;yTLt8v{szKQ;s?qZIrzio%ZS8V*IIBR?pgRK3$;qzRb5v zy)nnQ1k1C{%>^5o-^BdZj$7b|TbRR!6nSGz6Pr8nbIkL$&Pqaekk`udW{kI5iyGRb z$L#7|l74F)QeStU{n{N%at{;2tEsIRrraoD0(r6wk0%S(DxhCrWPV6fP7U1S6Cc8s z^K$0dCG7~LO4TG+f(;2zFn}RVa>912Ha&%CCM&;I{!`{7cx|(EGSzP8=ggpj+**m^ z6A_ejdhN*m*O>^gOw}S93Be5ng@$>Q=zY{O&mQdm!O%-?8Ck z5^fN@QBIKdqED>5^0M`?IO+a=RzCOg4yNLV`(n_6OmHW!{9ikb`GJE=OxB=EeYCgjf9+~7h+_=2_skMM4ni4^9Wnh8)8Bz&VB@h%(4vTT(J3|(rXN|@ zF^Tx2T1#%6fnI z8Bb=c@S67~DSr@G8A!}eVsf||=Cy3tUjr?3oWJ|eC-c=EBEX)2;d<5qCkTRL?Yx$L z#{y7OL?8f#-aQ~emQ^MP6Ip$%d06oIc5`<>U6Fd==|f(4!cwf__8tI*VM1`=)O4D6 z0&vaD#s9qnbGA=UTl)*FZmZkCJgTye{@M4zu3I@EamK8r>$EP2qX#u0=EBr?hQ!#W=!VxPNJLFJWvsldCboh zt;@IbDEDx0KI7f3}yfO?HgZ@by>e^FU+`4 zyngrBJ8sQ0enWv0(}9*m=XO1$n5?9I%m<@!I^AoEArb9iPL>!X)be> zfT#1nmZ1N6GRGRgt2rDH-#gqtP9MkrvnG!^M2-cm-^4orHv-%_F93dg|9y?jb6H$o zmGi5B@f!@tQ!Vox8#uGO*D4CVjcbn5)rR_B2FCWW(d~aedKZPkX`_LwXTDRc*?-o( zLYRRMmbEj0=oFX^*Z%j`p-KP@r#7}h^MjboSVuyG%mKpdc0kO)7&&RE2lNJd4G0DP z^AWYMPU3bRp5a5rt*J(c4QKMD+$*{@$VdyNscG{kT}o;yG4~fLO~3m zRe%Ta-wXBLHhh9o?wSxl9!(g^j|?djCWw?1=-F*uLCOQwgXK=`!R{KH3;*T zqwv8{n)eT9CQMH&k^=&%(9HAkzot-$C^n`lDHkR792eYpK_HmV*Z;Ff^fH>}u5@qd z*}L2BeFVe-3evV3PY#b7*9W$*A%UffwjETalhusflN|E=G{S_gR&50WCy>%V|GTTX zJ!RHyGyKo_jDLRTKH30`G*bjLPfY>`xMQ!_u~=4U8F@o`c%+d!JOpNg4Z$ zX)U*+Pt(#_XK0QXKxa?S2MR&() zDy|+a4e%jg+usR^Bl|lkM8I`C^9SI&`QHv(@B?2`d46ogSbE;U=Si$kf)@3XaELKh z)tY`3ue(1quF%se67Vn=dmJ=|FKWcrgWjzDe@00{g+ocYX15r$EG4y^0hw-MP(PUA zJ(owN>j;HIsN1b~Rg;R6A%Z7Cy}19)QGbI1UaL#7O@-w?@9Gf$r$7JuO#j#aYM>AN zuDkIBp1XX)>g)8yQr{=Ch*OpVZ#Vy8y?*N>NaloQFI|$B@wnxmPftJs3`^&)Jbs>b zp3nP{d$z54^`6;R;X|tH*Yn+!qe(hly&G-=M#UXHmI2{V~$?S3Pl#+ z-)vJmK9s@LxV{2~#pL{+!z6bEd~(=wzKm)eWJ#L$(D!sZ?5zInKRoV3uA!`3TMr|O 
zlPrBLivyqS`ryG6XE$9xxgCwJ^33Whh)36fPJlog-sg@3naMoN^sKs{s>A+YFIoTk zx^bK%zd+)Q{LKSiiDw_D6q*MRi}*ObbhZH%f`kN}OeR03lZR|Z8rB8|T4mn%B@CNJ zPgbr475u2Dn~CR0MovC-p9u)u?l}2m(Skp;><|Yd3|qSk=J)gm?JZd=53Ano@xH0M zo_{iMRL%)#k@CK6b3_()+RKRGSDIMpY1I<+-b7&MtjhrTmAoWbOZ4Z>qppnj$UC_# zx8r5puUp<}vH6GSzW9X=cnOeo4&V+B3W)5U&i?dp%;(3{z$UfZK2qrlUxiNO=4d;r z8>Px>AsqJYrpx8`;>mpDnV#>Js%XD#zsJ$amuL=VujCKQ0 z9HjD0!I?>uY#AS|rIhDiXKayU-)+e|eXY&P@Zu|Ft2Mm%9qyMw7RM*4(V2kzTZl6N zyY_}Q>fx9ecp>%b`=K{QY&kVzx9t2r4qzoX-W_Db*A)Y<=Qu%F&Us$8$8Nf>`1vj>|0)%+86HaTK(TM)QUaU2NP zQuklxmh9bXUQ(wFUUnu;sDQo$5*W_3B7+~j}R>Yp!3?HE7o!#>}D@#$@&8E#_W{^P-!`JW|9 zz1_p3lLOz4Kewi{wKO*Ao@X^B?9D1(*8DuX+?))(W3ZD_PM>4&ZgKkou} z6r79#ewO6rg}S;SYj=Ol<~yt#e0V*`#dXDKy@O0D-K+V_Efy0Qt|@!_N^NO$M9TlGWI zes8A}6K2C~JX^Ep35COgGXtpbD%0k`Pu%Z4N@O%uK;2hNi=^aJyGe9~_Jt(N;Y) zcj^7rN-Lg0oqXn^I*)r&JQWU(0zarL`LKk_G}w%lj_{n*A3vU!_-ImWWJrebxc)8GG8v;D!VJ>}{RDe6(l z#v<`y+od?W&Z+T|YPJUm+`1`T&E40uAUzS_D>ldV(HL!x?k=;Ad*-^G_xPivyyw=F z#31*%XJ(^KP`dwgZ+?w`FAdHnRg2vYV9yQm*@C<8+KM}hP~qpM z)!~tk=XW@~&OX2^Q1#h^qaAA0`fzI>t}|i=SRsB@yaA3Jhnfs6`>+`1f%`yR5SZ2u z57IQ=+-ABjk^om7wELMJMu68XO^&X*$0&`@`h*e`JJBc{ntz@(F&aN!;VKZ~a^BZW zt(exCkx}7FS;B-XKSN!qJwdy_Kjw`albXgZA{+&MYnbZQqAl#l>K}?wPyFyh-4-%+{;FLyOY^35neuHPJJy3kLY-F~Wb?*k zkRjIux;RGG`d}~R@)ID+uktzZ90bbDuW5qm(amBwc?Cno0@V(xlO7CS2+)q<;w5vR z{8IQ_V$T&~k!r~(eYqk%4GuMCC%bd<&15e#eQ$5o&Rs9MYc3~E9DS_xelmHfD_XB? 
z$&nbKTGPF2{lmDZFhn}gIn5+ASPxd`mX=t*ytwIyb+LZ0+e#j&30nJNA7~x@5KpZr z##ak_az3+~tREux9cXDAH7Ye{nhbcys z`6E_mCO5R7qOLw4afW~1VWee`ek5ZAS-bd31veNtt~z1+2|w;yANckK-qhD>zQxnS z*}C1>bG3G|ODZnL$^Yxo*0bHE8tiot=+F4K)3U&NQCtas5Hrv-CDa?h6>+OjPm7^* zp0SEy8Bd);xndk><=zkjEAo0hly;6TR`HR8td>kHdgCA>$R?c ziBoFBIoVNOD#``)uB;cPxK7UX`CEm)%4@q`p++%}^x{DGzqHK?|S7(mKHx?Id zOfDK8+7^w}l@tdMR1{&CcwVbGt{?IzBW`pnbgJ+A3`gw>?J{#nD#HoKn>RX=_Mhtmc?%h$)DndJR=I_0}~HsrHy9X-g7QL^-4y zqBTVma}Xp15g`)p)_>yua`vzLd!GG0>sjl4*SprY*TZqyW5>cg=b3X}E*x@iM*d1Cz&m9q zF3X}wX=r`etgKa!J!f@5RNBDpG-6@_pP^pCx~J8zN*JyC$QotqnqR6Fk(V@%rj2;( z)4#v`03*+SG{#<(#r*PT344FzUq<+%r%wZP-KvzP=kN5VQC?2S3^NLG;C&SUmm#VF z%AHZz4xL>*euV(C<3)kR;*{E-C{u%s!h`T&H|ogrSujl21>t6MHJdG})KIu=U#&lgWFr0_&qO3*`|I=IG+cHlz3m?# z?wjzgiwJoPyczwRXZGV|>sReT)4ph)RwD!%#57yP+9KmiU=kAnh0FLFi&X%dp2_|Q zZsc1-wiyi0n%>Ah89iZ02;ZYxsy(scEd_qfb7gsu9=7E~epFZin_OLqG{C&fTt{lu z`yVL`XN`JZ$n8(+p8q20WoK8+rhNIUYsun*Hp;}Zx21wzG;5DOTDp_AJ~9}Z2{mkc zqBS#6%NkqwscpgfMV5lhEH7iXt@B?iY;t#K(^(!_$!UjiQ&(x28oIvV;P?}j{$hqQ zuP&l>fY3ZfFB0I;n7y!`P0*d!28KPTcexFYT;0M=L-b>oA>dz|kBk-LiGwyX zTDj;{6OAz<-S~qoFoyJLq|2r3!F3%99V%o=z~RxPIM z`M93lwIF%f5omOzlloHgfj)Qs`5>yyx<|yVLCwax5~Zmc!!0KIH<^<=o5lr37(x2X zVvFX@C}f$rJ;H5&nfx&(w{uC=;B;*C@kD0rSCL8sGH`IVrDkuX`hwj#Xf?%Z%j2H` zsjpd|#eIa_PXKK**07qC=M&;y>z7kl;g1)GGZNwz1tuzH#P6reKMF-=cDo z?sapxcNLWzWIL;zejm&^dWrPz{^Q)Iu9F&aX!*1M|!PvQE04cx!#Y0|$pRc(*UXl*c#c_0I%&udOUd_h!yLe?Jt1!Xnf9U4)5su%u#IJtnS9a@!yQ=|r`kNZ4W zH}ES}>`MQ(xWGEOSz>EBi$i!+F4@#|e!P|=wgT~e=z6Nv>yUq`$qdx-ku$<}xE+oBPp zy3Duin4=*|NZ;-GJ*>@&{XEale)R0d1dV8AeCqH-?kB2N&y%Gk+waQ4h` z#dkf1!2oR1E&2hex_P@A$6D~0-qYd*GhP`Z-gV>N=Yn%P^8upmqBF(At9=r2T_M)I z=+1MiSo?ZX1pxx8BdJY_7}f#L3P~C|%hI1?y-xIpE`(n$D7~i?=mr^=-;(dX|3c%d zD?un%4v(WY<+L*kPcpZQDvy69zYrc?Yh!o9Hfnh2~RohV!JaSW>Vpl!!^Q|(`PT*C+KyG=M zyA;Hk@2b8fqsF{S+Tlo+x2;EpK4~$>eE~1!t9F5_>OSH=Gv-!PRGx!C3AgxN6Wtm- zED;|j!Ml4{8EQ^yiu)$;c!K6uctxH#@A=POa@c@iKCK@;b)3#g~{fdLcDM&Bx{vttJYCJk<=Gr&e(5x)l#97S2aVn%g!#jAHKMas|_&|2F 
zhgBCG>N;8L3{xLTWS$ENJ`rnR3zi-gzwhR*cQ&00ck+RTtRt}gO;;=Y)y)sp@X=-9u&hhHqF^j_aK zoJ?76C5PzaA9&;{@b8~$eU}ipm3WgoOWGylC`Jwa{|>I6oIf?OB-k@b9*PU*>zw+c z>zlj|@~8d!Cd5XBLMwQOYXp`39b+u_y0lciD{ z1GDc|IhCn`$=m#`9u!%3v_Zkg{`VIMnSFR{=hcE$R`hZBIw+0G{yC3 zL)i3#ZX1p9wg%^CCaO1ryE8k#g&X~vsTYCyMWW&^AA`Pj)O|=YCl8CX`OxUT=Jn7@ zIWj8cH4};sUaps=(5Ue#$3!_Vms+z*gYv1;^x|Weam0rq=6k%6x$yWBExMJ{O64?v z_hf-Avuhx3f~jzJnce;1tI74&L#(*#ALc?%0E(eF{SMjfy6FSB(o{MPETO*GuE4fUd;9%%Xg-a zaZ=wZCBNMcsHRL->}w;;&P*5s>A>I5-xm1W0{?FdbmT83h{FXhe>cI z#K64QoO9pjKF|3*=hol*to5w-UF-gbS z?hwl#Kj#~B$^3v`{r)V{ro8hzx6nyKTz9=8(y6e`&1><&=3)>(s{R3Y(&kD1;lp$1 zFA|IW&5v8;TR6P8;Kb{6ug+oN(EhhS#MED@`oaF&kN)vLZggT<5#%N*a)19IVp@bT z(SQH_zkP|AAr7(CeDiyYtN&)TIK0Cb{$`PwF9|2d@~3d)VvLvko84l>AWm@bw>t$N z%mXIG{qK2}dmBFK zof9o&%rc)D@0q*|7!Kl-$eFliY*+Juyt)|I^<6xpj>S!HucC+Q>mNR+wO`fJkwgrV zY00>JuR629@lm`atYVc@*i(G{td3JdVrBdqj$2a3K&+*kLRbPp9~st7yrK$j|5Ua` ztj0KONE|+~mFe@?y!m@0II>lbU-=nv)8PxSe4cc6yjN;=l@@VtTWziATykJjQ!;NP zczqN0H0YARp_8jX>9ecGb_s{fUCH2Wf&|s3A2L}__EN_=J~tLlgp-Z`v1VecVN{m0 z&MVcXiIJW-CGh$~rmP0*7OGl^Qm_QKsKVAub5R+*{Y)wA$u2$O-m%(q^6BZJ=il(2 zgT+pKz&9AVh{J2+rCGx&mNm0sv-F)&4E$y4sKI$cuZcv0#F2}`V|i!Ez@(@Xr9hR2?F1r*qT2m}1p1Q{*jg$tkKGg4acmul+;=dc?#x&pgw zu-0r7Xa=exASnpn*AxHA?@p6v!eu$8=k)AKz=>5|wX&HCUapOCJyG&W?8F($n@}?()-@y4$hFkrQMA9PIFs&4-;yL4? 
zyZ1vKdK{HH7(~%fv&}K_a?!8)Y{xD;B}EZlGkw2v+^U-60a{URh7b$dZh9!a3=2G9Y{>%a*US-sG@73252fdFoZ$(GdBO5avH=1=UCNJo8 z>z*4-ZfIqdwXY62%pOjY1KEG2}vZl>+A;>R%K2vVPn zk;Q9jq%T;z+mL>wIG-w}0ex`NV5>Um7qgclHhQ+-vJNO#NZ0Xa^W@L`+& z$0Z3^!WoLwz@bB=dSQq>*I7Nf>tus-D2Sqd(>fLjPc}MRvKDl0n22m|Y}KIhIF_p3 z`~kOgpEG5+>h$1a(LrT=q z$YNOQbAq7f-?320^0@Fzx5&Uj3f>sKk6%>L>CYB^!N(ZB6ImtJ8$==V;bndKXr5e2 z`3PI~VPO!wH*KYTKH;Xh_pHSwJ1bB0my*3k&>b86WVp?V6iqoErJ!;+JTG`OjAVjXt$dR>kn?hL-D+~N8s@t-FaiuN|L?cW zV+?b-oPin?ueMojfYh#%8(*-k0BESQ$m32bjLnU^lxV60ByXUiO$osm2V1bwgdJPd zwnpQofzzBp^?nz&)9J#Lx|vj5SJfZt|K0Si9e5t?!2igY!IV&Z1iXSZ?v7^+4Sf<} z4dJyfW2wq}IXO;)GnBIFfu4)xl-vP&9z*-M%Y+xs*=OY+CTm$z1B^yul zrSsx2HritUNN^biA6$EVfqTNd zF-HshlLQ0sI@{YC|8Wcd#V!0NdiYQD@C(ty|EijVTV-v_8-;8vyDL0#6U={xJrK}g z!|$ATyi%^SpB+CE?3(<+`Ct@FrKAGs;NJbT>LzwW^Zn&n59I^r?M`7M_Cgw0u@k%930 zpxzf)dn;jUD-OZ>Mc=CzMZ8w3hQAdEa%KqwFm+?{M>tqJwJM+h1f>kAviV)or z`ISI6=dBu-rTwK*x(4dn{B6IslXP!~R+h_HFrRWlHh2LgWh)`ynXBHT!rH4S?>Yhq zljog+3FqyFlRj_i?Y+UVq?}xl^-mt+dUg*EZ9~V-t__Cl@*zlVOubEBY$ZT7v>hct z;M#I)>8TAc-k)I7HNL-?cRW7E(qpUAeltA}O@)_OyYw~hpX8yVnLMho1l-3Hz$*$<`_ah=8C zjXHrJx{8l=%QGKV7AjqU`J7y}F?DeOiv_OHK?W?gi*3o&5jd1FLb_i}Ok$Eg2fM~j zo*b}C1ivke&t}c2=R8^s+h-fX?by8OG~>Xyp0dfa#=FAH1xe;NS&v_sZq>Y02YtE# zb+7bT4mW@QSUq<;&B}OE-J$3h-7Z%p{sZHJR>NzH>Z$Vt;y?6fRg3SioJ^ zHWZiHtfl?PNar1k%3$H#H2-QE+tO;XPjnr%#&WCEO=hz2y+1zXHkdkWBq_ud_Y2EE3^EKWUm47xFPpeW zV;k+hIOky2Gk2HuEL+AsCcmLk)~7{FCusjbC=Q$A0zZy#NZ2iS5ARUGz)0Cl6}T1I zxY5zO=`$noeAo>m?@XzNfUhl3^7oPkxogon!LKTGHom9ZzPF=CYswRRFTMpa5k#FP z(xoAFIf8|K&6>(#MrF{Zu;1Dl@iu+muYd}N$CqE!7j8L&H__+lzuVaI!biFXt-O6L2IL(yrOp|32bk>c?5O}gH_ z2DBIJYV3SE2!}XZwWE|kGTfs4i>W!AlaDeaM`WsAYyC~VX|IQ1Ci4mdPhl~`zEoh< z&Um$mqu~T~+Z3*7`HFQe%}wVzlic#b!VK!C)kBik8U(r(ysD5dAjw6JpX+f&j$LKn zNLo#Firg+ZMJ||RsQ0&{SO~qiD^mMxK~y)kn_NQxG_-sI0|)#HaKI+w6MG!*^rhIN z)IcVm+I3w(s;+Vpa#EOKJZJ6Xv9!7>4*QtUF;xGNrqJ!Vs>x>V`(y}@m+F&|hS=M6 zOkG_AI!5G!ly-#cr|*dvN|A=RB3r7=N04mPEeA(&j|Tqci+7?gRXuwIg5;4K0RTtg zK(t+-bRe-BekT8`YlQk7vFesC(rai@aw=Ufe7m=FT7piYa$j-OYnq)eTvyf2Ji6;y 
zDE(rcCV_D1;R5$%-eUVAl8Y2WDng1RoMjT|J-7vm_3Ttu7I|uIfo@EofC~u~95@>d zO%UHKiHNr{TJ(BQur(a}w(2^0D8Oqth0;_(G94f|^2LJ#m@36zMK02rRQo9U=CKvX zf(gA=X%BDYT!p03@QY+pnzJL=VKP=>6?WQTG?b2h;Fx<+^}M>owpwxIgG7`E{cXHs z1Y?rKHo}7?w>K}qrf06&FIRx8zR{O#XZ`&FZE_Ql!jg)M|2|4V!n+~wd!(mP$uow% zMxsM2i|N$NzOk}6gU&{~XU{dOI6;!+E$ukW3G`qL_g4{J&t%dhN^3 z9z%1U8ln?8PS&D(dTCbB0@*IHZtimgDhakfSg-GE8m#ALa}~+kDeK$*1=1n_FjUbx z-me;{WqeKc(5!=_UO(8|5w5cJ>snh2vyEfdxxYk@W|z{PCXlI1-7ZL9gzA;JCk-Yr z2(1o9aw=S=bO@8RKtqq-UKx_^Y%H`UO@`Y5#I1y(O>mAKNQ?AW(XyDh&yVbEBv-Fm ziB_t-Q5R*oOMEbAuZ7wf!>ilbbKl}uaPDg$s#awBaHGoxM-!F{fTp?jj#*OX&yz`F zp3f6wK>nz!FTwH453CX+w(-BJ-Z^_U+o4IYchoxy&o@9F3MgP%b#Cc#GE=YU~hytmZ`csPS+rKJhKA-6t>i92Y@~9D= z$}x@mJvfs+%=A5vI1^;E)^w)STqh$Xib%)SGbRgBkGuq)QeGT*jLDq;Cq9!qLF_&v ztFR}h!kuTi>T1~rxtC@5CU16s1EpI zKFeR08HmJqPOstu0ONA=Ta*CJgJ?#B0?x&0{-N1%P95On^yM6AaskSu6#k3B+W%)k zV!>*Z=|8yu|8D`~_YwpU4!0+Iq?+{i2*HuQy}+U%NCzy~(Ag;DPuv*U_+kMwfsENR*fjIP{_TKGT$a4Bv)iklJuUoV%!;LN{;hQt=pQ8!080J_ zoDYv-Fz@^103{uND_;S?s_`VE2{VrH=UHllrzsRMXwzZJ1MRhxm<)VwyFmGi#eetR zy5a=%vav{wQF|ET1W`JMBkUhU=l7;$@|qJ9B)_t}$xO?%a^ z#2RJnY@8W45oz6lxCe_h@!{u`03_D#cjWD$T&Uz@WG~pQjgbE@-zC}-RTGIKwLzf< zGy$wH-xj6{6N>?5jn4bb9S`V%;yt^@b034U+lF9@8NXKcFr~KI!=_}Yv)c&_w7sYq z7KyR<7dwhcIe@+2BIywe2BNzw4tsI)7Xw@Fgjei{qKO2ni!BeD(GK|cQ0n9heG5C!lJF#pK~=^ zk#0QgeQbzZSyZu$7y67z5m+T@m0xxZ|B$nfBB57ZrRMI-H}Zq-!CFTvAILOp-obk% zF8j1ZVRvX2Gp0$#6%Jq{No-8-$n|Dm#jV+1BYBU5!Sw}y`St?)@tbA7|KKq3rFWcs zg2XCKdzdWy2{OBai{p~_H@R)5n-;>F8Si2UtVwLrUbmW=;F%UzwCYFU6W_qbK<+F* zec0zm$$_y;+Gb-AvK^C^f^mgU#q1N%8-fzX^xzB1C6e*d+_&_$wI4C$pUz0^TSq*3 zFdJR3i(J;&jQaY1MtZAUW`_U<19+4aX?ndj%@<0_fa=AU`})}%>-zgmP~saH!~eVC z{`Y=T8lU$|uU35VS_4^dMZWi|7)v8Hy8*+TB~Nj~TBV5rhxs6u38t0QB$_W20WA~&`0Z|6XhO<&u0KeI2p z)uXd@AUu5tYR#dAoVn&IU~SLjetP`(bhFgPcCfVy>daMl>vakw+e`yaCY8>rHvgQQ)AU^L?FL4siwD=fEj{AC|CI6I0S?r+ViPfmnwQCh(<4C869VXRP zCdpUN0vo8PQOM1FB^fL9PR5;{p#&N&>slEdrIXKD#{Lw|b#T0uFtelU-zBz@cQvvL#YdX{lUTEjAr$xvng!777K7S-EEQSSfx-bg>no0pOcM1#(rJVEU 
zQSxPR0`zhip=B|XW_q;VnYu=Np|;tHpQVW878PZ#xu*`Ea?H^~V^W_I9np_9H8G~m z3nh`xgWkMrBBuGD#9@Y0IyXn50lrGh)1bf7CV>1hwm8W60Szj#4y%rrmP%M`ogA%w5djOAKAu0 zx>-&%^B#!-?YYGB9n1R~KyzOGTIVq>fbPY(!^|sOCnHw%NNnMPJ#wGlPH(zdE<;=^1PcH6m0)T3a zVlhWlcrX8BuQ-gh=TJYSFpgw-tSlMVVY&q;^p0xZ)xM~p_W67dxsA8kT+k=Z-bJ5W zvNOkM3e-&_W0|iq)W^_FrK;L_(_XDz<{DKok`T`&n%I(>CbJ8O^m$G5uDaswUG2n; z8jbIy=YOZu{4(jrS2Z<7IIp3zLA=#T^j=nPx70zO94wPnK!DK8H|*mMiUXKV&Lb@8 zFS(lL>odB=D}w7o_EX_OwU4;9c=scZP4^EruzE)PajuYCrH#oJaoX8_$PIjg1)KNU zPs=#QW4FGEl4`x+(-Hzb#V)5Q*xO_Gg*hHoiDF|hSc|v4cuG5rGdW!8J|ucN6&22B zq|Z@^W(^-imu_CuI_WySE4vi~9J4%`CFO(C)%R2yW6%8>Y=p7Hn%RkOr~&KH`ODVf ziYE#Y-z7TnX&#xg?*0(E<7i~c3N7B&>@*~|2tBxH?J+e-sL=3W{+&t`RF+%?qBdao zrskX@9~~C_@&3n+6-6}u5_bHA;C;DPZM#&d-m)B|{C)(m%yw>bAI)!aAY?C7SbNqD z3mw{hB(+ie#y6dD7w3*247+{4IlbH<)e26fAuw;L3yl**Q|avco`r?tVFgbM@$#LG z9KvAJ$o47RTvJ=`Z@oh7$uSS=+QO1%vCr!O+(tj{A~cjiEQ>3qXQ@FY>r+=|YKdOH zwUDNfPHIm34xQC{;zV%s)11Qmn|mG#3D{^D)tCUte4!wd>*r6s4FT!t5f@XnBJi5@ zBAf+XG(JA6srjrqWfN+N)w6K->x{$G|E5vgq3UkQw$$PezF* z)$JbH=EC8EqsObe)p`3zp)^9n<_TW+x{NZsWj&&m^csG=NNzz6GVa9|%s|u^6Uv@I zOYhIc;5GCh$*N8+OBs47dnx6ooOEkFjZL01Q$HlG^8PZNn$@lyi1^f0U2(7WOx47# zW6OxP-ahDIky<@lT0#B^WR(4AFo@|(=y+I!PzGQOL;x&a0Fd;bP)Y!jg~5c#@mKPO z!b>5OhMaEgK}7ulMuJi^s0E@K%!FC=dn+OREHbu zZKpB$aHWQOo|UR5O5(>h4&osuew#v>iq(0dY13jb_{^pDf}>AgRIZ%8uS()PReh6q zK%qI!Tc|o8QIlRzeWby!u{Lc&*IA5#5-yuPFb8MV73wS90w@$kwh@54;>*udP_;l! z<^A=Rl3jeB3_L#5fumA!2t9kd&ShwQzlL(NkKIKyYn2ln!@|yGv_|se8hBvRapH>a-NmUv@V1)Pp*& zzN9`8)ji{;!pvntc|<@tnbkIWnK>mu;E+A#3HeBBaYcyDv>U1tIQSVdXx68@Qz`th zE5rSqeMSXG_0MeU;{FSbt9EY^8{St%Zj|gsq|e^H3%t`w*a{mUmn2-0jCr9BAcy%! 
z7VXSHLoTL`KlZT+q8P+pw~mx&daRQyYIITgkyjY?>f@wnXK%rybI{snO|gAp*Q_q` z>&+ptDY?ZTek7?)j)=aF_nf4#M#Iap>Wd2G%^P8Q8dUq9sVffzKbwV!!*C;`D04BH#^HfA&VnQKetGrzg^AVPb zoeJ#zLbPyd70s}Yk?>qo+tkRS#NtvPzG!|E(FXcMx6XAoxyw{xr|Wo1IoY=XEdK9k zYWjG*h7Ig_2mHe8sFC5yi#>3bGr_7h{6ob+06AMI!y3*=noOoJ;<8V!|k_Bhn#zVm*vz8mS>E4A-IXysn2 zN?&;oM3RHMLk-C!?$9Lp&8mkY@ET1#a%2Tlkng$Kir8u6ShWwLCF7F*jUxFsc(_1KtEoLWOaBO?77SAzx z%a}7Q;?W{<~opvAo2OBj< z=JAsPk2`Ja%5ha;uhfip((WH5_lCdx7uEG)mKr2mk^P_4x@a>Jt*C&30ZI)QJ2ZKl z^4D%uLb{pC9yVUIM!JY7T=g+pXU1%S>5wp}Y4SKEeva~tYP#&5OR2#WYJau0y}Wmn zy@9Ele)t=ndwd8_tB2!wjrY*nJHb=CIiKL8`ueIB`-#zdi&J?rLrL#R&w>Lscs#0% z7H50VTjX?_@^Z2?)G1DPRO1j?AFz9^e$DglD3f;(l6kv~hTOcUw^n>I70KebneQhm zZk49yKEN9cM`|4DeU~?!71`zt@H+8}V%$`TEBMOK5qZd*6IIJs&y2IPlB0+1+2!;GXkV1odh@fnus*iw(L3JvGuV zUrwN?>y`Wm>96=*_;AqUSEjg&7-IzU4=ld(d9v&3oelv8Vq6crAmQfO`q?*YP)i3v zq9fEkZr$n9w1_auI0{9li|DR?RaHq3zL-mD8E3HZhZFrrM4q zmeUrM5SHQB4Tn+3UY>^Slm});8e}AS5o1sC5{xB<>o@hh{ z{zHiO{*;;K+uU>;EkTuN=|LUgu^VeQ76EX-xbOde^Ud5E=G;1z871YjqJlc<^ zdSn;O^TObGgE;J7Qf6p}`r6^1hHYy2DfPbBK2!F14sAj9;g8n&GCuP?HiL%a#pCL& z+DuL7W~tOd(l=R11&F%m$(f4QG?U1A(G;fN9Q*kL=d&T& z5{g);3=C+m7MxJ`S9qg@3|x++Mja~Fod#eo%Y#BRi2Yb^g=jJ}!A0K=9*5lJ>Uk7* zl@jqPuQRp6YY#o7}w%uXQ+3T}`>-oRX3;}i+lIrD9}gznk!bi=mx$h`Gp$s9{| z*N4Kfu;raH&XrD!n(N5?fm(}><7O+eehPFWT}TB_Qto6s@|l+gOCpIZ145N8qqu`p z({%>l@8m#u-|I)iwJDRg+}+%hFio!wzqpAnyNJ_%NL%3=I*1qNb$f&JhuTLrJ?@RM z!9j3TtmMatxLs+jTc~*4$Dh>zT%PKeoKJ_z8I{!bl3NrCIR+OwkE?VCOY>uEr$aKA z5Tt}EXNz40)QR40g?l}7r`}={WNTE;gByWjOKxf1r2ci=e8(p|j**WqurBn51j0TZ}{iAJ}na&yo zFAZtaRq5SuXf#GR>$7`&7f;~XIH2_ViIqcCV=}mcJKlQlegC!79;Tt8Kyu;2zA3cq zSl>M)v9mD!T3i3x&*(QMK4=jDBzgjnh8C^A2C)N{=oE5b zbTGNAi<(!-TGs2~VFskp#dc51B);$7VZ!B~(7tuVVPY2BR8Pw4vZJPY9wjXGFSBm; zM~bjUeh$*IDVS0^5xYNLS+UBVShaITx0N~Yeb8@0q3q6J5~?myA;(c^04t)?8n7(Kb&pYQJf9&EQs714T*vW+^-aTILq=@X{5+S>HH8>zFLyU^t;yKKc+Zy9 z;Ocuz8bO9XPTIm>!`JZsmPoXn@o(w;v0xK-N0XxR(2|{-0Tpd4PG869%4xC6tw^0gqxv!3}*129)#fz z7dvfVf_gyLg9rTIRY4doQR!s%?~x+@-T?zIO_lvTL}VT7K*H->mTPinN@uahx~1N3 
zksOx8GD6XOijE1SFTIDvVVWef^@Z%Q`a)|9yA{GxUR&2&^!Ql@3U7M5tTxgJkzJ%9 z=%_DzSMSsFRimS{Fe}!Wet=|QuX^wj-GD8`b<{G5;=%gxM1tF=tS>6#u_$xq`pqYJ z4LhanUi(IV6bJMXXPe(~0uU$Lg)E}x1vNyPuM+LI^j=9UM7#qU= z#IybqO}Np1xbAFS>buK!c5EY7O)`rYX&&&pjid+X5%FAZ7nvOXw(vu7+mIE$vv+L! zL}{{lXicSgNQVS4mEV6cn<1ysI0O_t5OCLQJQ}Lozwt@`tQOBTWx28M3BB6_XWlYD? z?6saN505Pd0*_)3*i@T}Y1;b(jZ<@vY4a10M-UX#Irij_w$n1DDuRN>&1nG=bpuKA z!VmWiE3zU!vWg?vlQM)8-FLwRG~KQoAPBr83LcP7Brt=&b7spK?!>_Ks0BUTQNo zIUUamteN*2Nd?OpPKf2zNeG)E`JV`mIYWX2X6nh>|v30drlm zJL_J8o}R9BK4dXa>_>Zdey$AVoIL=PBlK96FO=N8n~=ifu5$;?hpt#bBEG*HePiM9 zWiyT_xn;n%aAk~LCCyChM?jftt_ndm?&SV3s?$0UOw% zTweS!>k2Q_DG1GirW(`h&sfKH-_hi4BDb@ee?~aYfCT03ACn)1en^8R1WNObr=SpA zbo1Yr2d5;pu#Ff`OL)7pMM(PX2gb6v^t0~=>{V48XwFD6=CEgOBoHM_!)b=jHpdGV zwhN-xm9s~b(RMpN5)uScLq8M>sCHHz+A~*wWYDV{!bw7zsFWR=!tiy-#t zq&eey9{Tz4*u=x$k=Ixci@4CEMdhM7pa$$iBTm4llnk7UURLBn)$rJwk0+*xuN3^# znli1CIP83*XtPt_j>o=ccl&;pP%PgPpl^b6op00F>eWa+1%xjBM)Ee9Q^}p;=@b4l zgn*9I%>B8Kp3$Y8TBXmp=LDn70Ud#!GB|GrLnh!*4oE5n1c&Ed zc zk`<0@nxJ~t(sb>Xzv5Swqu@ov)*e~YWVQKdfz`+fD{1d_WRzr9BOkLSuqOCU#lqws z2_oTtTgf;ouOPM0&Y#dQNQB^&I^`qARxL+0A#V4+sMv#EziUVo{?RgUjyTU=I*sKD z+V&BW?gh348mN`cpC9O|dD6qToI9o6c>*=?%1R5b2zKX4|t(BBOz8Z37coYUAgEg}h#(=nnM9J50* z-Q*pp7j=^t?@!?Kr~XvQ*^RvR_F9Lgn=+U~A(o6c?~paqrOxt(dTeYoztHpfJZ6G%mm}Mi2l}XearMSI_WR=WyRVSwH%>OLcPx%eO%9I%YC?h z@km{mF@V7P6HjyEBz2$C8p4j<8T??GN!xpH8F9aO{K-Lq)zC_m zzvCSH6Lz;;$kzL^e8=`SHL2UR?OdPwZ=q{QtwI~PT;AURO14&oZD*rdd-6UWDFo-| zDQ=D5miMQ)*{`-#39jQ?qSpuauN&K$E6O~1i=moK|0`tk(F8rHE>A99ht*%~iaU(?hpLRrl=Ci}#y%_375P-c(vujNRFjJ?}VIN3|NEYqZN_ zh9p=m<=84+dRycy%9^p$c9%_nFqZAn(sDA|fA2uZujw`Ia26YKf1@~=PRb;;Fq&25 zC?R~UPDi}elbMUqpg2td|u+G)9 z(4~9aAT4d@-OJVO3xY+-e$vuxOGo=7 z0}Cn!-2tAA$;E+texx01a8H&rA+#uhuAf&yA>GdCMn~y~YGGh&zePUmeKrdAVHv3w z&IDfF?lX@n>H*>lsC9lL1?mA1iR%DbuLB zLoH`$npTGAJN-XhaS>%bt>E>oJ3Z)oUgv4{h-&k>>)ex;TSgt!`Xe{yp_DaotOqxQQM0ip) z143>xWgA}Avt*=u?WTmXxV$f4eP?Hs1dGOfvY7FJ*w}9IT%4Y%6cBcc?7vL-@4{B~ zaoNCM&i5vS;?wDGuuevvAw}2hpDRYgmgQ9P^Ln6=HHG@*h;|jXx=m} 
zk(*Rd(23OPMoegZLd8w@GYxRtx=2}9u;5%1hxy*1C*lePif$7k4sv_)6;u4pDxl`v ze=_zUswa5-1OVRXy}bkIhyperc!al{1?UToN2*S6 za?nRolGcNv6M8He}X^^HGyIjiw;!Rd| z-f~cKUsuWTI^Bq(E;lRW%+9n&Min?60m5o*ri5J_PM&*Kx< zS9hpXyZVWObljc<0oDlkzHjBstFTyrxhlo{%Yg&;1-e0#vFfaaMRrpW60rFEG>c3` ziMwehw4PEcO?cm%4)5UL3%SQT8b}%7k9$6LZM$RCan_G!lzTB;G#6(-0NaUaa{Ydo z=pr)lLPIvu;IzB6bFe<1BE2=~#&M_v;nx0EY7@jmqHb@16NWONc=&O9&*8MYdS6?# zvBmWYcaZf}t9jxCy#-Lyrq3KFbuNh?+EDFWbxC*hhNvfZu`gyh{2&RA9Z1)lo0ZBA zq;PRz@BLD*J>V}t5Wh4~zW@^~@~jRClT?vn+xlTZ$9{^OuJ~l`eY>Eoh5#LF?F*S| zK&`%hcI)a;sfYtIHYnItQrP`Es2&ynsa5)q5;_hq=%BqimpW!EcGw@9s`jt8o& zUwGy~xj~FC8kken ztl?IrsjaCc@anvea(Xq?)c@Re~ORSc=%xraWXNI;uyh%IP{4=+|0i^?q3Db+X+h zLwRoyX+KZq-KIV@?ksW7E_9Ra$Q=AWNW`?c*`C@X6M>uOIjGT5JKY^opRvZ#TI3f*?%_O~Smgr8;ro!43=B%0dCdVFvu7evT z%G4+l?ZgfdE#7qMsQIUZ6_eD13Y>O_6<>aDI6H1QCrlk#?U5FYcUM^2aPKgZZUWlf z^Xkqc^Fkz}<67@BG~2aJC|A>C(ZuS(J`Mv*$IFL}B7wk68teV@rYt#JqJfZkUeOYS z*a{`3y^RaX)R_HVMj1qbpOP==K4P$io-kf(l)ARDkqJ~tKg5CakfEAW6T9EAs&mx2JIPxAAVQn>tSie~tRyX;-sI_DOeO@A zG;nzDN2hWO0RzKl|F7r)+%quVLi+lnZCBY4z9aIKnSz25-O+sqE{5Z@)&_2#DvQyn zq3?73C(1C-$uai)ByBciY4i2^oZS2P2DA3dMEp{v7Td<#mKxARdtGs@=8OL3^8Ax64vucQro$%J<-Q>&>NC+&965M#8aoZ%xp zESS_ANKS`QLEKE_UOR&xYxxGfy2fMHN=D7uY%gc=&RuDA<*-558|yy4Z1e-3ffENR ztPv1JeT%uX8d`xdG@!HbpW{BlyRjP%GR*_@aq{9eE%!s{E*Y^1^mTKVl}A_?$+n9ss`hpX zp0%^#8oY0b_L>vLwLR(GJ5z)oC^3tD{=K3mY>qj9t3RFpZxb|Rm8KKD1NH_X$3{|x z@iW*o-MMllp1kXks#}k1FF;fbDAY=!Tk&IF0?92-G>z9RE&J1iXmnQPf|%6|1!p@) z#!h**{r7?>Z(FT~cL}p6O;S>H=<+PIH!w`~4DT_ynwacAXl>{0?Cus&u^p;*AxjSQ z;7(3m1Btn5E0>W@w)cj-78}&+>w(ZSs`T;^ny!x4F#*J(VTZ=C(^!Y4NGp|+Lz#-3 zi7u~x=w}P83qI1_R&q0yEZy~Llh^HtShh``?CYP}Gfa-(w|);hIUGGmU9NLWEK4XM z#D~os6@A4uG8l{R3=nMKf~IKErZ>Z`F|UELFoMoLPNIzwHb7BQy@TeS8<34Y*}D)SR| z#7lRtv-}Z+OP+_$RjbP1=PrPm1GjdaqHnq+@>;E~k5T0wtcP{}w$Y`%RKX`BBJTF@ zIWN&H&&1d`<=md!now*JXq2F?&>u3LtJga`GzG^0qPhm^*5SYp74Py_mN0_!xfIp?K&DD_Py= z6i7hn>>H)}bDlw$SkUvfe!j3n-?){kjXP~B5Y@4LeEWQZ`L-}QtpvG+{d$(nT`CHx zhNB%E;BvzPwdV7|zWBhE{jalILK{B60O6p4pKX>_m$~+==GX=@G$_3cENXwI6E)f@qx 
z+X&{aD)J@`QmboCQ8I2VlKN$Z<7w>`)Y>Vf^6Mp#yaIN-If4LiWW5=N?eORRtB!*WqW7m6<|s(*G_XL9oBby_+M@UZ4H4|@+3vSQ!@g_acwAU9Bb`d%BZT4 zMFikXgkpV+=?Cx0b!O7s4RDj7ulbUVNpa#ybr#^jBb;@HzSSW8b_aoj1q<|!ZQD)% z;pdLar=I#Fhv(vDU)WAt{WEF^D%PddJuUW(Gi5_7n_a)Isu0x14)NY2YAf3ffKV}m zfBZ0so)D&)x;5cX6N~67u=e>OS;6FnL$Y=jR~(xryUz5%-8a;0lTkK<^DU6!@~UUd zeh~&-&~GI^z0cb*jS{QClsBwx0FrNLXMiQ{#l{wk^OC#xNrpA{dlfG!UG_CcZ|J0{ zTM5!epelL2h`+gjq|_5`yuAmw&Hv||p?SlPv(3#bleRyy_@CJ=dU4D-$#7C=4480s zgtQD)H1q5~uZX*ytGf^z!)&M{Vp!qy(%`RCbO7|r1)?|C+r9)$=6Y9@@;zJnS1Yd2 z*hCG=IW*Ap=DbaM`f>VZi0muX)YpNDi(#N!laltYI`)r=M|YhT6b#uI8}6jYIwUZn z&o=Q2EscyQ{BIf>@f^WV%B?h4w;iro=c!mOdp5jQrJrJwMbiVorHCI-i#8u{ejY{A za76Q+zA{&_itEoz%9w#LW~r{ba`0c&7MZi0sWwi1?vb6xk^GLu5~OYkc8@a_*!?M< zD+Y5E14dK?=rp-$2M#Xb-rp?H#7XN2;W==6MAC+g&SHxczKa+U(vC~%XUaXwOO486 zrrI6#tOo`Ho+-1Lidh*1tPIq6?lF(?35Zjd2u2>#0@q6M5 zU^g7yd*J?E62Pu+D0oRRv?Ptcx`%%Z+%hXIke|{oxZ6<0J=Y>cH2l~)Y!Whe?8$rw z@h#|XuL&c#3~^a%kwh4k@$)4hv6ycP4Jnf$^a^p>2|$mpt$%^rCkxX7D#^P)<-mU) zf)zTb)!C4wwFQYglQdTx%;EGB=-;W$qDOoS;p#QL3?6XvquM)itT=y)3ysoSQ%9cj znueiO%n9?;70j)M*N8eNIfA*!ze)fb`R9!F&%?($EDW?Dhqa8SbIYNWo0HI(!4ICY zE&E5RvRM&C8Y!)cAltaFmdZ$T5wuM0%#193ssz0?>>EeqJN+dU_-@q!`!=8~>-Jao z@{fsP_jPp)?cf=wC}-d)Gp0P5(+1!o(oj7`r3St*798Fki$|sLzxPh+e~+ZUlo4XQZgh;D_-&!r6-)yzcMU)M#&7&vE z77v1lv@$hgyU9tc=6Cm&pgcgU4rG!6;r!Z)sT&7?t}gs7boF0^^vEE(+gpKP=+p$! 
z;a(9}kYrGyHe@LDEzG|~xA>zMTs8G$OIwo-096~E0X!%U6HjF8Zf@lC`1nhW$8zbp zcN7q?&iaNCf=4sq;)xyO zC&=(l*!M=e-+!+&@R#WBmjV5!1aJQpy#lE4k70GOn$s`mqv(!?vwzirU)`ESi3tCwp zU+wECY#tz2vFH}Lnv>rNZ5Sut!YYvHsYaDMY>5ue;{6#6FQ<51 zBxysyrTo1g63n}^y2TSI1<*Ut#b~9E@u{^xQHK3P{Jh8Xr~3s(*Vzv=HRf__#x&UY zP*D0OD&z*l12qaF1|Cq!Plti3^jX6$oBnU(h1|0~S(WWA&CyZ0@XWLxs>BMJVnR(| zkB>03-9#1NP;-N}PxBP%PDzZ25kC|0HhCo~mL%UW@fhmM&nGD8mngDlesF*boi(8k zA_Jh#iS?J2wVl${uEqo!KS&?TK1bzMpKajtZ$R|4lQ6>$mu+LOq!B||e)H|FPK@a2ahJJ1RN)K}!F|b~gLVx2E|2d{mq#nz9 zO!#zdaRi^&_c?N`btvfVty`KGeOh>jq!5!_dv2~K^kp}(eOlhIZVc%4Iw$g4GC?bz zz!7*=EVjUM{SMF)dhMF3md2HX85y?s@fbne&~*7{ompWGpQCjGBBvs_23214)UQ8(-SQ{xDa&5@!JU)+R=;RC*-+YAmq`fT>akIB+oXX zcazl0^sBTQHHHC-Es@e33Y@^rPij2pF=BW8nb=X6+}+bWHu0C6W5t7|W$KMy^qe$1 zJU#9lc(Q*Gdduef=%5a<)x{w@6t{lS!;f4CoySz#H2af#Vq7`kA_o7Dg(?~5W>3W@8>Udc1XFL~g-K1MRPU<6VRlA~bZwq5S_+dpd-ED}b$dHKs&hpWr0!Eb zxKgS0Nze>UK&`fi00vo9`RJD+CQ;En0+^B==|xJ;TH?O?ldTAiY{K9$xV#pN&=6Qx zYlPM~DOgusT#uSFkjjX?X*9+H7Tx?=V~hztP{F9F$@FhI+ENKC1D*$5@|vMrIbVl6 z7z$kvd&AG{jV!(mv{Fa+7FH?VUvK5DYopnT(6~GKwnLp5%n;NN zYwu$KX35v|<|4QyPS5Y~L@@xf1m@vA8^@TXuRk+OOIVa(;Es-T0tLczP%VR>Zx(lX z2aRv7v>I5!yRzyDJqoc87&4;*036i>mvr<6AKKx{WT+2EmbV?@hIVZQXw8`%AwQ0E z*wR}f<(OPFu_NDH+g)!1+Cd5?^!?kis?7BzBAyLx(}8$8ax?m2i_Il(6O)DZ@C+pd zi>~vy1DbY~T66&v+tezF3hC|Achwh4)`Rsy=>Cmg>^wviyWx`X_-${V1Lp|4SHkX| zfMHwoH?h$_XR)sIURrs6Qo#Z@xHq3tQ0Jn$8nhT{xOSqx1xZe(;f@aWs1KsZO(L{x zo014V^FHXhAozC0eQxTzjYe5KyUwXfi}F9Y0H3LBNHuIs`V%bqh-r!4xs2t-aq1S} z#8`Cav2dxb26OrJyi--gy{H&+zW)=9btA?mH+G;r?g&1zFR!~UuQ!?J?2Jrs5O`C% zIq5RBY4|wzWZk1s#GejzMM$=_t2OHl?ezOw3}R~ZGNc`5J}{%}4-2ju+b&u?bFeeG z#{2c9yqZKPjTFH{FCKEH&n<-l67EHe=LgGqcfVwP#Zg5N&&jMFUUbK!tWV5M7FxFu zIK8(-=uvvXK;~$7Gj%#)pmE4kh(NsQW{?Ey-B8Hm3X%~vD+F*z3*7RBt5jZDm)m;W z@8FWvSTa+zOi;>^$;1$4WJN6CnjY3&HJ*!Yj>x_EXsjxJWL%G2ir~`+b<7jhB*JMr zn{kV_S-Zli^W=|6W8uOA>pkoh}14^^C#rmlGU&Yo7m-+rKhXZ+2(#M?f3?5D4o6NS_`Gu7x#mfY#W z;Xcfl#>u22-Rcb)JI4)bu6jsCnz}lo>5o*NN2Q=gaM;@@BvV1MoCV9m1mfx^ySK06k2|0ivB<3xNxJNp&Nh*A5~nE1^Tx+Y2M;!mcN*ax 
zGJam&0`h$y;)EMH{mAsjqihsVV?h)eIEsQ%z{z~F_&|z_*baUUJPuZ5z^+bZ$S){7PXuAV8ZZDbfqqeKp=%{Q&W;ZBwi@F>HLcPsxXO#14>|bb=eEZjf(KXlhW?_vYnP1rmHzA zaPKJcDga~@1Dw*{W_Z$j^-zephu?U%7gkk$c?z!bxESJv$shlZFPtz9f3@<_g*SR} znYb?SArYsRwvCW>uc52Hjktn5o53%r%PqP^TRK9R=UMI^O$Es=Tpm91`{= z7P+gE0|*BU<^Ljb{>(lbHJ8D#{k}A~3YNkvk2wG@7V1bGm1`<64e^?GD~rAbVMOG0 z!&mM6iB9T!zpvz4a-m^pPHTT0CNkbVb)^}@Cwo`UtHYT*)D zsk!c>Nc=fo%S|`Vc?TqOQf*<~zGFpC$9wtk%9r8mC>UBZW4|LJ^+r_UKoc&fiuEjf zsqeGMB!!{?hj_t1{tg|mF^8@X{LC)^hj`ZYlkGPH!76#)_**MJ zOy~1Kw3>N0G_-!l!X~cQWvinN6uj^>F}YBdBN=4X^;K~z0Z*_S!0b1AsC zxJt~&&yJQZzv}90stxH!N;usB{tmfc8v;rQK!Ca%$q_myM4tA3aNXiwM+1L><=~)|baC7lp7gvkkrWzi=Y7Xuf4I=an zeDlRxSpG@3VTo1Mb^*tGp4g!1BS%sZe=cF`KEN)Gy2F)c6RHD>iU`>03KiX|;yvC0 z3osdAof*#s7El&r!qPeaV`^w?xfyu-pQ$iqvEb5a=Wc4b7VOHU8jucDhOWnK#Jy(B?%6c@4rM2nK?mGXzqELSn*e5k(pj$6`VgJ z>L&^OdLFg`>6Mp{GTRBiSj^P!BTVrAd4?*=;uIb&P}sxTxkH}cZnI*PVHA}v55JM~gr0P)=b_S&D@_Z&`EUF{Ezg86OCa~H(EPfO#w?2Az z{;jf7oF*3LNg@KxlKi!5C!X(P|H154tp*n|-B^l#t7o19ihhYxQyXepLK`%_=pxL= zF<#jc84IgEj zJfOYfN)9A%SuLXAe5a35b{P_P=hGJ?wa*%6EElG+`bmC`$lxps^o&KNhiQ<*H}()1XLw_zvcOIS-wJg?-K z76Xgc6YLTnVoh{_!Kcn1dcKx(MZ8y@Q|EAeZ_592GXb(P#5_nOtT}g0AGkG#lVQ_y zI;CRaV&G`mb25}~dE<(Vf(gc4`JC~Jb(K3O?LT$^NJ}iTM@kT}%=zruY30WuD;P#Za`*dG1fF&3baFC87&mDvvy31=# zv{7;Nr7KcK#qk$7Ig|9W==97Mo;dD(dHY0Xwtu+!nT;Nh{2zZ`6bLr$(Q;VnE?zFr zh30A>cds%PMdJ6Z9EBYe%}Dj36G&3DzX-8(mY4Vismm>TH{A6KfV%4T%Wkf8K zF(dTydcC1oq_LoPB%6(%#QioKVx|r-a}s_mo3eTlaKvE%c76G?v3H5D96SE_UdHHe z=_PL-WLpLtB43WQ5c})6tvFoxtzMssjA9&VC@^7g#De_Jic$>XeriHm)qKo&L=B!m zLh2qu^IaYoULQg^7O_LXp0;K`)8a%#AE{I69zJo3h!MXI3nc)vO|?=p+C~eEql-y#{;Xc@riLQT)s{{5WX-R_RFLRJMDBC#Z#ucFyL2(t)yj$2kUndzVY*6 z*Ch{;4{%A_9+jSeUei(B8w%0*q7u)RiAi!b@Bw3f{eHU!1Qz}ImBJee0PplX{%*nk ziKL%grzg~1@$QlIRP&?okqW3F@Y=$hWF8?d*;#0k>bO9{oCvSeD#l&h6YyCG$hFoq zswc1p88HyjLe-&{EM%(Dle(DOO9nlc+BzdDrF6(6X+Fpw$?xa`DTe#!f52>h`jr3# z1qYG0MmO+_sNONvTTwN}DfgK#J5heJiJ@HU%y zMoptad(ejuLz&t5lq1pc{Wh{JsD53_thMc*mywII+iy-LU{g*&0OXXeSV7V+qSMnq 
zapo0QHQRvCr%T@p1~nNqVwc51R?rwe+VxYew7x^7yMK}D^H9D0*xo5}(S)$P3hj5S zOZZW7lOj{oldAfZjovW?+*gd3`8%Og!gq`QWAO~7+WzMJH9lVxQ$pk~>bsbTS7c8~ zepJpJLnhWb@TJ_-Q{h?(H_Xe=bq&E{E<}0o43@-ePGKyIXB*0A+5vPyvb?2g_uh84gy+! z8h^LVHa!J}o>`6>4rbd(daql$^my zJe;DzH~3zYPtL{e32wNw;P~?NHA1yt7?1P(&=YcE)k_@!K7wsXo27%$K8}Hy`2UF` zfHd>wDd9A;#pLX+zrz>PfFg4o_gElDa`T9!Q_z-oOcg^_0OZ?weC0@)ck;k>wB-DK=?+_-ccQSuD7 z#S4zdW+!a6MXa9JZmq`*jz{-gpUhWOJ&46y)frT6-Er|?C6-~#{E@Xp0PCVC^R!1s znPa!7Itrae_e}jx;_v5Rja-ArI01mdVH1qhVTZL=FXzX|jYomRSRuzMGe{g^2V2;F zKb92A4i<-sAJm#!1nCvF;LfzRr`<`Qu!A@$#qLdLISv7hxN_=k2{XMRtLj zNs(+l#)8R)%MetX978G&BS!)xN|BNhy|6Y~61tLpMazxvQWiIVoW79=WVsaB8jU+ zc^`mdzOJWptQ?N8?wW9mI9?PaEGZ2;tzKZLYk1rswU+IHIw^A==a5~AYPfKfb+vCS zhET+S9P?ifooRK->lUWeP3c@G)c7LK#>D8+8qA-3<>m%@HIFQK$j>|R^mkK%A-tEx zG~}E{N;v)sPl4%n-!9`=`y-xlo)=~+pIm73OqjsCd5U(tK938wCsI`-8kbtlMCdbl zBp$6dg$+y{^jMvK5BRwNF~kMIyV>oTby;asqkw2;V>wRc{uoV=!@hNW+Ex84fl~5u zM$K%uata2OyUH%X@WxI#XZ?jQ?w`2l*l#FVSYw&dq9WYG0fe6Utfra|I$>I6pt*wPFUF+r%HbV{*Cr9EJE`Lm}M%0*d8a5ugDULQaDkwtS()AMhh(zhi zhC0ABmUzz(Fm@t*e!*aSu1i5In^Wo}ysW0xiqDy*IK0+VM@^|^@>ih{n(#N+Z}tcv zn%+nl1{4LK+8g!`NtOG9VS4EQ9QJrn;1zG1&N%Bm7r?|&*ATX<6jQj6QyV~F zb4cjQ5~~G~a;i<0R%T*-%7fxfyI<%D(n__z(r|gpmhW36lcU4D7JIDns62}AV60zc zZ>{x44eeZ-=zfClC}-q;fV8UPM8o}b5T_uDq&y?F3z5pB_h}bmzM#Y==Y-%ww>0r_ zS)&*;9|OsVgKGC%ING)Gr0F1p3Rc^17BF(o-3#wlQ9lN`HE|+*a7Sg$|31HcyKFff; zxeu`%u9Lgkm;_j}XD$`N&J#(Ja$w&BSd>6#@-eMXzvtyA0Q1 z-SaJ;pWe1I^V;(*dlq9nALE8so6sNJGs|Y#?|_H z@21R=f5`-a`T*+!=8MVxb3DQOWkBz@aN6VyyC)Ojbqo0|=1kjj}Io$y8b+;R`PtQbjx-Ju>$?xg&>95-xP|wBkqiTb2wRP}U zhEl?kYliqbVZ>GSJXuuHU}b+S%a#j|LWlc@(9nC zCL1u$=@Pt!?u<3x!}}5olmL3X6?CzeO#R$(;dQyMum?9>5o{*^pN*0=%(rf-1evei zp-W1LfG1HPI$vJH;nB}64DrKuoZt1&h)*vyTZzIli=PZT1)RPVHw%`2t>J$q@9P|l z*IdLlIYN_jygTCS{f-in3r-|sfWi!JH!Bgn*9lrk&SJm<7siTjm)ZzuN{Ps__X=D5O zM>WYMjOe;#JVrR*DMMW@8GQQ)wHBIp#e>k8WspnGAd$cP94oqtC?|EVMt8HN84tkSz!NDQ5y9b3B|_G{yXZSYk-_Gv6@>K9gi^{M2< zq7>>8$HE-IndM|mYO+$PvX)V5v9LVZWVH0Dz-x=F7F-no!XB;5L<6+{)~$q5N7PZ_ 
zo$c4nn)8%OkPk^?4C+{a8cN2%gLTh9%Lhb2k>=hpITDkTdHaaU#^wAL4#`S6_FCgyfE7k+(h%p^tbXK?Q_E^7*n#2JY>ix^H z0R<$Jl8N(kIl8}_-6GNf?pDwJ{EM-T#uum}XJEIhGm+ISW&9&Q&;8}a*cdJraZG}@ zds=p0Mf)Ew)=Zk3MyesjwDC|G__wn@0T7cT`kS5P<$yBwXbDZm!R);biZVOmYB6Bnq zqL5NX^mn!~drAHm`20&1ONOX@c}du3-4RoB6{Gp$NU0;Nx$pja^T4~4*ZaU_1v)(~ zc@pjmhKJd&Vz#zs%(LyKtxmDf^WqE%HH!xP3I7@AuE zUfO^#N1_e!(o|dl&8B^dJfK!2sH>_WHPSS?)zNZeiD5UB+umB~x3P zb?cwfQ(ewxYw*)5RL63w6nIS&_KU< zla(_5i4w#Cq}RIrj@5F1*7w8nUwX6JS@SwUeP6K|J+;6IpGXOIyvp1tW88Gf^M{Dp z%kvXp=?zvGqtD$y^O!@wE>^o?@IHLA_{rfL8@~>|NYZ3^LX=Dj5r5VXFHz}SCQcOP zdw9L6Azz>*)Z(Ay;fLO<0GO1kM`|}aTo?bS-Cc+3+kk-!1}X(vdKOqT^*#^i4_dr6 zuR~uFk^isjfDD}WCMzZDxl}NCLjBZQuh@MY)B_qG@>EGTQ7w*UD#VwyaCL7)3TL^x znh!q%1;j~jj8s>K29c8yXAGgWkwpf?x|*$vs+(yg@r!0wnB{R@=;Q+|h0)#wP^2Ar zkQWUYPZxtvCo~hNA6ba#Fj3>t2ZWi6mS7$2i8pC?dyxqzE4-hup1U00{j+tpZn)r= zdtYWG4M1GazeC*r!ZY5OOl1(_z-?kE&p!LDL z%H{Ks2Qr)h-?~`2tZ%kZ=jxKiwG7wPYEHmU7!>0FmWs%zSfil0DoItL$MD>LB3+jfosN( zNCHd-C3>+Uv|rk@*T|mVqtC~C3J0S&qW(@;JBS3W_h%%a@Mt<&>%y>da) z_3Un>cd3w=%Sv&Q{pu)#-R#oq_|*n99xg->^XB&LdtBP%-+k=Ye{SAaij3riod>bS zh821Zpv8k}nm6~Q-MQwJRze&`b5%xikKXxyL}h zsrq1bHC?@>Y?7@(Dg`CYlv7&^+Mv|EGE2bu6e0 z{%7FXF*GjdLFrKvNQ$31fzT)kmtWGSO}eHpWmx=@vaz-bPkRjZpc;Pjpqx(?WwjJxEeF+_xn%hHG;b9Ul0U zAsbtJvVUt|;Jh6uV+~#dn4|2#WZ3brJPydln*Y{(_jkY2Vltg5yv+ z=^cW6t2AH*N&FH7wUU7M=#LO>sHXuS>vM8Z?i51$&trsBvLX@;#lK82fM10r z4EBN97$(Z630xb%z01*tAap1HtB#VI`S^jX1&dRccXEvh#IoBJCG^*&kwMy%PpQZ; zUsD4c`ZAqAI-4Ir5~Rv$dZ<%K$D0#4(=LhHFOi2N*b;rZF&v~y)kG^JfXAf=|GRTW z!~r>@i)Zrs+1f8p1CSLIqenzQvVDfNdU<%@^T?zZ|Mj6^%b5{rqfFQ#XgvFBJ9LpeJbbj#|;(GyMaXy;Xy>usFa@3NUT4Ow;(CuFBD7Y0jJFc zEAl7Wlfwm7jA%{y5c8J|8>f7mB`^CRW3F&JP<+V9@3lf;Cg9}{hzMYUgG_(t_y2tp z4=EFn{q^b^6i%GNTQ;?YkBGPj>Se=GE@Fr^Kj_ogv&!I^db4b`W&q2JJRTE=py}U) zp{Vxp8{02Q$AY$4Rc6BEk*e5&n{%6xM~&IS4k0TA1t;H4tHh$vAjhQa)UnOG2B+*_ zxW8l@Q1%-xKAWYIQtrzv{_MwOOZ6F6lB9&5y=NY)NdI^7;$flh^dM>A6(G z&ldsz*&*onm(l)!>?Fu+zB7^2{#&3VKbYK8;*6I`3G&UOC+SYb!V9Uf#7wD>d2J1c zg9TwkepiBMkPzw0f{#;;t%e_~Ea~P`rM+BkDJp}(AGz+SC$ttym_}tCiiU`M 
z&%CJPzJUwkheS#ne$P;wvQpxt;yD*s}QS`hs}ik`N%y3R#u4W7VowWN*9IVyThwKLR+P>=;{MtaODxdf zHpg)t5-A@}=a!X_DtFP#*I>n2TN*DiBk!Ac$B%*aG7aHk-iun?WeX~UD-|rPwb4r9 zO!rG8#m-h9Ujs669U_gr{0b` zSX4LT3}GD$B;Sq7;4VIvMa2b-cEP&A{$jNFrR&Wu)FF=+na&l@y0aP3rE@ph^jZpo z^Gny~J&>^9A{f-;g!(&GM6d#a#J^xt0BB)Ifhik~^2?4nO8547YP3H63B|8YeVy+m zX4%87&=U#-r5m-v4km&|7H+>M;Vt^gC$1YS`AB3jhNn#}1;l1Re_ZRmHTg*Gg|@Ia zUZZnpdH^I*+K(IcI8HA?X*g9vbh&o`7y?|~?)7!Rle|GLpHkwb@x-B$V|(0|{rY&ND1 zPT~Dj#h5!lQ}Nlpu|t-%2rnr9|q#&==`H zG|n^~JQsj}-|k{1{|4bR$NXl0E15M7b05o16s8we!MUj z>X?FxGV$~~56bfb)I_Jp7ao~Pk+A5O%)4w$h~!B8>2IphwIFfBPf3)WZ-+D(8^5P^qJ~?O?77^ zZhTSXBl;;Oh5Eh3)X^n*bvm8VdV%rElle#qWIQU=#U+>*4$TZfqMI6tlyJ8YAV3U; z%u1$7iP+ahjTTtG543$kL>DBbU!wI5zfArM-0-I(oyrvM=e&Oe)Z9c}bCded1f#S{ zB{H{k@&jM9OJAU;TS}?o7T6vv@I&cv$p{|zRqgU0>Ixj137DN`5X5wNqH)TG5Y?a0 zTh!($$UW{!9TfEU!BK+XQ4$nuy$8BnB6WIcpZ>e{I~%o0ewf)weEhY`qiihTNB!Q5 ziy_UWU+QpMq1on^0z12D|9bf4Ol+~uB;T`&cQ5Npqq`O4rw%1`JD}uR3q<^yl!A1M z587eLrklvn=!V}S*pZbLQBtaXN|>t{Lkp zz*nc;`U<`F-C$!M9JO?qUU>@dzd-=G(0?ftAxLB33JlwF(v& z;A!8TL_=wY{;mYBp3X{4WWV;sIDkff&WLMPoy&tM1Ff{PBM8$+F8;Ts&tp$DR_Uu8 zR*C)IGYNF7D@0Gi9{~u)LhM`Z2|u;I+Zu9W`p8iYJAKXj+(A%aT^c#A!z#H%Osj^zGE;Griekyg8bR zEIuogu~$;|39U0EK0G!)@NvqZFMdZnFaX>5a>2VB`Im(}IRjrYO;85njgX04lZ&yD z0zg<1DX%(Pe`YvPii1K!)X3%Yw&H(HJ~_!OT(n@{2a^GC@dHnqr=uW(?#m;{+J@%8 zXplElnj|tt2RcZzfJ4w%5=2`xW(m9qMVZQu*W0iD*2Sn?-`CqND1+anFKKFKKjc9HxI<%q z---(Z0x3q~zsvuBHGE7@0|Np*%}v$zHV-Y!7$1C%bgwETQRpl>pMsg7qYRl1yG%HM z96F?%XyDQfI-`frkpK=HT|gWhtevk?+RnS%sXV9o7+aamDWeplO(fW~&K$NLhiT2b zAmgZfEY7%*awkK-VP(4!ZYw*d(m7T|q8E4g)Ohuvg*JBUU2*Ynh*lq9KKLTB(|q4T zKaL=$p`S#7g&abM>jilwPzG<_bkCRhE`F@awjJ zFo6}YwP9`e*1+3BFs5{3Yh3ap+q_E>G;%P}LZ{{zd1_f83(og*ROjdpPv71o><6X{uR|6D&5$E8oPsOU5#b&NU0EeK>KzqWR&pqKpNK zCM;TeS!0jv=p=$ghX44cKNG;sg4)PAbwBUsB(5Lqk8EA zgHM{5+FmJi#})^4Fe}!hrfhuyI(hHHm5&KuV^f)!g=R$)aKUUekOKX=KxZxy!;@Ba zY-RfS?^dl;h`nF%hNb=(G^pVx1^Nyw$FDgy3#zJje*B28+hWHNZQhVEetpNYoVdCd z;(QJA5evJ7+vPMg5vzQ_H!3+%!K_ zTNm%DY!_P3%9^4WSgmdTWCvW+?%(Dvgy>Q9|JIk;fq!}JOmn;BZ}YQdclKa$lmcH1 
zRr?|>J}sL$uvW8McjLJ$PiO0!4dielS8HaR!B>VmxdM_TNA4T9&brqq?vndpP#C()6eP$p?;ZrWTWLXo|eu zY!(2dIlO<1GZ6kfHs1_nazimOeaO%Z%49H1{uK3xAXxD{j*PSY)~LI~Tqk^uOR}l~ z({K$bi+r`O@5`)%*5PE8y)(6eH)0|n%a8NM83^lvRCqsxd`E_GXpE5H#TSsiHZzXL2w}Y@^W<`R(N@&3@1fJ;#1RU?Z93t zj=uMRMl(j-sEqP&ZJxZCGf{|d<3qexsch^_D|GzlLp-(aLO;`};2N0&dd_3v!63-x z_i3@Q5`U?3Ob&Ek%i|;}Yd-ZZYe&*)?-h3SjE+bE!+rZ%&P8y^2}k2Y)B0%#7QzLb z{AsVSiz8smAG7|~Y4!VgzkR)$ua()(`tj+HLvw|m_W(VuHz3+Lf&fyeRo&P)=i0;w z^N{pVj#M!76istZi=XKe3xF?`WV^}44>o(Oy*za2)~atOCKJU|*tqIau_}8P9^Lh} zZpr5(Gq-)esBMKH`(ZPCOjx)5?4of%F1umF%`#DIeuUN$0#@o_{_i&yH;GzGG<^NB z99Uv1mm27&ZK=PZna1Bc?1SB{f zVHLMuT}Gqhv(;`V=TVmn1&SI#&A^P)q2=g<6i6Qpz4kDHmn%_ts@C4_y*st^RLZl{ zj8Z7@j<3@H`yKW8{fPSLnU_4_(n%j%53m{=apW9XC*Z>ed>-%{1U15A3!NxN?B(ae z1HcMpG;(1Zv(`g)Sn6Le)ADbz-8&hbOj~Wtr5q3Hj?)sLp*!>E zU^V?)()#-H9V%IH*7MNyIu{nq=7h0mA_J+@kxy5HdMbcp3#$M3R^zO=HQpulg%23X)dJ98Z(IC?gNo(M<;RXEGHhv+=y{nkP6B zLW2xw9fp<%N&PA|$2Y$!G3!b%3q)rrH6un{>1p=fGB@mTdy7!(B>vgcd$Bxgd+ty6 zs4geIUb&RX+YeAkX*cpRYe&+%)J4OC0{PMKrar1*QT)u8(5{ko4_s(OGGe_!hZWdq zA;=SHlNzF_;&u8StfUJBlsAAD>ioHB{ndBHjwPiD^|_KtJ=J|r=oHUx)dkbJ(**Wx z!8aNrZ`f3R>ul$vI|RfOob2XSH8|o>gz`#h%Wc?S4p`yR)KrpB9W5botg4n+c=lo| z2JY>BmbhlzsgYp}xsrSf)K0b#aNN4**E{6up38HUfC zo(oVhDewWJ5qT3*x+Z-tEKDa<{186)JJY5z5;hgh&RA#*LD|@orN;({7v@OfD7~Sg zIs03X^Zu_A8QyBK`j^m3pcTAi%%wy^M-I>x#zs0NOgn(Nvj6xCF!m&}TJF?Idip2| zV)sI1NLKRssP7sJ`U=m1znttIJN}J^(Dor@^3e!=X<+36(1)XS6x#O6kLw=%ty zC;&*PF4uUD0q(zU7rAbSbMG|QKFG+vcpSW)OS3#+s?#w--Zi7%5rT~_lCMOb5pWr7 zn-yqwPi7j+1x#N(Y6QRjJA4E_Q;#cNV?T8oR;YeFU&AZ>(v2D=5pQYYPiFl|q$ZSv8E$8PGqKALuS`sDD#S#BnCW|2+b0c5j9 zqMi@l2vS^P$@=WNh6I44`U4o&0x1aEX*ZschP4^6VZcne6a{$#$jBySqEAZL zoU@PW4!+#skef>rks2OZML=Umj$$14#JU1Zetj}Wt+OTaoNIiq6aLkinv>!Iz5RD} zNxrwz4r;7a9)LPSdFKiw)EPeqqs5l4(YeX51ARA1zyg!C{^o)}IsoQyKjepQ&|h?& z2iw2#+-j+^#3E0_1#7E&3J)FkYUuC!c$4vI9k3o>N3V&Z|WCGDoZK)wvO1%EW&!0|8kX=x-cXmp8kSG4U})w*8#TRH`X~ zwtVWckN5m#4OP8QUd+XI18V&w=xW!r+HbihJi1N7bIzyFl^h}v2|+fW*QsR{p)gK3 zm*<(*IWwF2xGrNrB>lL>!dZVNG3LCP(uTRjRi?>=cX$x|@UD9Tf 
z0bioJ77Nn&=c7pot#Ovf@iE80ylH<@)`0Z~W`#x|K@1GYC{_YJe50D&tRcnjqPWczc0QM7A>j>e+sdfA8dl&EEJLmJ8 z<$V?UKPzzmVS(@D)tPV}j>=YhS6@qqMI;b{EkF7!@f1QAP?rbYJ^GaAwh728AZ>LG zcCqw_Nnx606~j23_Zjo%s>1Tc>KW~(A*tEB(87a&&6;M7KLzRzp;jk4_N z6s8g+a5(U=G8?9RxGipO@KlG%Tqyr$@AZaQ6D=^4>gN+3Z^Ht#E(|xky8UBw2o#L# zXKkmwf8PS5`w9xy*HfcullO29+3?$11o-p$drF7}t>X!;6`qq)>jjilHMw6%>-h~z ziXXPoxqAc|5tp_cq96SX(b%KSbmjewMTh;$C4TYq+VV@KiQ1hOkpW5|LvVaQefFwg ztC3*myn-CG{+2O0lKue+e}V7vP7Nz{ zr|aS1-c}O~nF#7q?+%cS+9PK_5cE`1xtWz8rKk5mSY3>@WSjIv#l|58zK0snludix zdBM}saOKrbR8QLAr?*lY@lwJ#6*qQ6H^^t*R-TI|j|!Ntx?rBH z>Fr5`b-5M7l+M>wV8!B-{?DGshigE+g39*49`((HqTwjkh#eLUDW9OXOn* za>$!eoiF6$v&_;BSo5P)aaI~28ui1=1h$@FZQ5^K-25b24lq}?CS5%{ziaX9pazLL zCc92*;4?-0+}$62*D^boFd@9b-_>j<>>`?+pp4QV@tj%d1-K^N|d~h1cJkWXE^{^qq-oDLQBFBC7XRU3M zqJ*@#%Wnlg(I5nOKJag=h&iS%!cec3vo&xTEI5QdaYbe}_1B9@0GqHYc6S4uAMJ9+M`vT)!SB1#vt(s^-REGQpeEo z|I7LWCQ;-&@n_?ZIi9KPmziP?-L&8b+=LB4cYF_&3YS1pQ&tYuS#=w80hTwbQ)WHS zi@_LeZRGVI^OfRHZg0J9NsBv64xC&%&_7pTgxSR?@|EI2Ug-5ZWGK*lZJA1UobwG2 zk9Lz&d0~+BVzyrc7_ha84!C&P6RzCoEP?&+=#1uepB@0HR=!97rmN2o*Q(>?-Q!6d#+}k zj?IEG0V~sel}Vi%SC>njoFnqjg&_cnN%dqUyeczgK;_hy*65p^&?UpegUpCn+@YWw z%g&$Zc%jc8IO6|o`E<{*WSCTD%xAeg5jz$`C_5os0Lw;nX>HlF({j(MlJb45_0^X3 z2M?sz0arVBuA$}dUJ|~?7sq{k`k8b8!Jn*N>jAe71`3;d!&r6mXH~t-$>aI>sTuUp z4494V5vS$@imipO?q~DDm3^bB;c`03kry&n4-_Lj>HH1M-a@ZZzHC&Bt{Qv{wenQ; z*au9&8vCcaBq9+D04>*!&bPr0y4|IfbE_x6s00oB-t{?@R7{^#W{?M-K`xJGb&5iT zH1Ru~oo*`@7}ieMm>w*+c&6N@ zesWR-GhiM~?CiNUAtkWMj<;Rty4_Y=(R;s@!&UhZAzwf3EonGfTnFqJ(IDY}rnA-5 z6(Nf7t*GxVGRt?*$yuc0I{c=DS4S=uMkmR@`6qoR97Ue-($W{u9Foc%Ucxe`Q(HKR zs5G!B?NNS0_wYO*A7ODm>zNk@Q;O{vF2_|d3LXd-M`Gf?`p`FFI*Qd{L#y)&i{|@7 zI>|f!wt6y=@xhE`Ar)XNwq`xH{*d%f!;b=vxO|W9b_hS4dfOqb@T67qLrdH)3ppu5 z3JaQ=T-xEU<5w2Rp1P3i$nc=m)YVU*L}>Cn1L&ZkKfAr>M)4O;X4PnJm4|Rgrz3d= zBKfP(WUO`LU)HpVeCeb{De}*O0V)W@;m#OGv?Kd=dV@m@eU<$gCwdz}MPMFg ze9JojJ-_URF8&4_Czy6NhbXGG-yvQ8>Rx^4Cn_;83*5Q6p=-GCc=w&)A+IB__6#S% zWnk@!Tr$@9mK*Vs;A}`a4XbPiSBjgb!sbz~Az;pGz_dk+#y}<*sOf8Pvo_ZtAYZx~ 
zW`sbJe7$J!N|0Q=tAM2#K5|E7TIt3Ee&d9uq^RRgfQeER)cn4+FFk7`UA?DX@yBM&? zr>!Tza47=(56Xu+MANjjkmCwU;Aw_oOu_k&MLq0&ukxmR_~)1O^K=JGCwW&@Ngx7A zt{ZgxoA4_{B4g9=$~ZYTJEi>NUuuT)@yEO-vJow*;w-z7o#KM?)^yWV(x=0*8H_SN zg|7Y$y++RG7b(1hRVCG4gZhA!h!z|3$>B@ux$*h>jqQpL$*m%nwY%YH;fC0=tvlnu z=Y^3p7*CE`_(da4#tYUQ%xgp45pyN!TR+SI!}q&m%9w~NE=ii$9p3AGRdZ6h_c?^_ zOCfRN1u{*1?#XVTKJA@Au!tX?x6ij=zN8;byT(gwM&`(P7I$vBbk5I`5E;ih=0q5JmS;)8*IxHHMQ=DK1eqg zG(qq<-&5c=COUFyD+N+ z_oZs)wBp8&GJ7@Y#c2>p~U>f0up|-qO<;vMu zixS4cN9!T2uRdH~TP?{=8q7gaU2=Qemv8cO`SW~HUoM@|D#>>fRO%wA0dEnx~`Bev9H&8;7QeDSUWgz3`Di4yU93<}#I{G*`;pS^8uva82gh!qN6#S8NclpUjfWU8&i_#u{R1md?)m6eureXFA5Xi*)Moy<0V$yt+}J| zL3B8Ilo*{RM*-wKT=+=jw8Zs>V(2^t`xZcu7{Bcib{AAOy8?Cpq+6&;#dKXU{$RzY?s#u*PQ=rA zjIJG=i~a)*i*pw?fj6=&ch@P&@7~@zE&*9Lf$W<=%sJaj_$;|!mCsIra`PEVmamB& zQW?-wYHCV{Ur(>ca+$8)jY0W2bwX)I z)y`2tt-glRyjy#&IY~rNMOh6E*iNo(ukFOgs>vdn=&S45`_z6m}R(!heHjaLjO-Ay=84m*w-C5Yu=j!R++Qx97B& z`u6hVX~UL=V?C{R)MtvkCuWtPDfu;EtGs*giV)9<#@j2$_;=?0;jbol7^2I9C}Bc9 zn6ru4Aie$QsNH@XY|?g)7K)91BN&^u82y=GUk9Qu6&ssmu2&U5WUZd=6tx>zY>B>I zU)jUCI#6i%IRrKAziXEd)s5dowKy+dq+ij-V($CR0?|c1Y!AupqXjP&x8X>)zaN%8 zu*=w!b2^V6J}HQ=gQDZr3a#xpPv**Yf2h52;J1Gc=ie*3u`(SH-;S5V5x)_+!V-mD z?jJ@vAH*jfZQfgj!Sp_hfyDb|tpU=B%LlFBE4l#7E;v9+65qR+iH(& ziU(}w_5HuYDImlgx2hJDGD?K8UweZSpcAN+zF5N@v9MaVAzD8ZXg;6{uU`~sw|!#S zVG~y9JHyx#TPA&$2y7mt_5Y`!oA}Bd*^a+(Hwx;u=e+_@x7?NHex`*B`BW0v%bsXuN~v9fOuz-ua~a zzpD{jm&r8VO|lD0Ve0uOQ(L+1e~<_|YrX1q`>Yym+@FNb`kej-fXiS%BR{K(y^C%9 z97WL1XEI7?9Y(LHkZ;d})k4I^FNjS-3NR8PiWTj6NTB3SLj50w7c8_sGgi^7tNg@<-eFR*{QxU1gM zaPzD%UvN_7?()kTxfa*e*Dk7?c+ghIpY%yLP+}r^RlSpWoH+`I6S56%q{C0Ly$w6L zNtbop~Pc;@7)x)<&>=5{w5uom1+v)hk&p@zaarj8izGyQv4$}i`c z&A@{xdDZb$7R}sssPd>Nq~2GxrQ+3{Vx6w{&>VPS>>`(r=hub1^aZ%qL26i7iS;&b zC&)$@m?vZ>Ja50-}b9K}u_ZV|;NPSX5g|n*uMvMp*P`2~$?><_OvG|c zXtV}$;LmcUMW!Q}NrTIGM-;fm632xf$N4=!;8#UNGv2wPbQntt)7w+9baq}R{hYSB8=&tVgv2n>vnR%m% z-44P93{3M*41Bk{42DBd;F0bxjJVkIskPy_aRZuo?7dE~A)rdTEiuJy-EXVL58aWp zEiLRgS;Gek?13GmI2G)8DjQDdmd{;z1-JW87IleUB6uo2CA#Sz))8F(JO;2Yv 
z`beqalo)Hq8-1tTc>lhHk`ExZxFZi2U5Tl&2m(V} zYfXZhfk9#vt3gu_=9`Xwh_iywG6sCk!!b7ygZZ7?N(%zzO5h6)XWgKA4v4Pa)q<%H z2ZC5!oygj`I1sHErx21Njp`<1ZFt5QfW0XoqQvSWg{cU;Gn%FFIN7^u3}C^jFN|9m zcH|R*a%v0VTz*mu?r@}@O+93^=lFZXXYRi0U0h7~;Olm=+0Hu=ulKf{oWE^a*&+C7 z>$l_w=P@bbhL;prKEEdq`0!@l5J00W;>%cha()k0QWnl{3)Gr{?|2w&-ll*OYB{gx zO**Bhxmn{$DsV7>1PyeXq(ux1Oj2JwoN@#$cRkE(G6oZ}Rj5CF^F`Kj$4rU|oymH4 zj+~AxxkWeR>y||7x5xyvml>VET5AL-`LoAJ=Uxfr6u58BD_6G`$4E{iz{P2CK`v5R zs?9!e(&+s1a{mn|;%7_U=4lbi57V13fff07y!sMCsps^{`TvKv=qJHh-%Cge+ zg-x8#2`zJn%_!?WUK=W%P%C+FtY6Rg+bt4j!i-$nKKXh>MZ=G0nT3QH+1IN0iL zmeSvo+ZfiX2fj&jLLt3#w|VqTLzdb}v2&3*`8GYZ()YUtR+v&5C3#12=|_2~>R$=p z6&xk|6TC|Tm(G5@-xk|)n>IsGeFi$=Tg>=~V|u}LXS-qt zC1=xy&w?7g+8hu!b7OdS!^Hm8#BVcg^76P7kwOa|vxGGl>=mE=Zq}gqV-8ndU&lwk zT-y$#7;nXt4&cGnka6}mD2WjePuXE$BzQ5m-tXiHGZludP&6n(=80kSAW&c}=Y2O~ z!3(mnZ)T9yx=wByfBN?j{6^07DH^FOlA$M}XmlaHS%jeX=OTRIDF=X$wi&nR4?TJ?`J z*5b@C(T1Ulb18KYwxBud#QFYRODMvqpodkV*$Y^!Q%+!3djT#Darfsw+L38Y!k2fs zdnpC}?G2SR=Z~*#Y83=vB|3=a+23i#ioT`EOq{1}8X~%tp(D)7o37?=VG?aTzy7vT z8icHjGmj?i=+*b>b8AaA-YdWtgfdXn1VT z(DA%!B$8Oz2zEqcPj|C$4mR9 z-Pc<%U}v}TevroCCS;w|)yloiFI>5_A_~QHhM3UnR;Hv1)k~W5Ms%;R^lmtbtt-y^x%3~ ze>NZNKvV=1DPdfK#_DI}n5q*c;+=Re<|%Z-$ZAaD!Cy!yJ~T-L_^`pV$cjDtDq`<% z^P6@g79hP;%<$_ta_bM;?Qbi@zuqR&&L5SvLqhFP7wcrhfaE`9sPx@Tt#964RwpQM zdmlUAF+XObAr~!2MVjk{9R=ya6V3Rq?I5A2sR%8Y4Qp%XI$x97+mk;{t0rEHLe&}U zNRn!tuRRX08~Ywsu*C4PwZ1z!6#Z<{aak2Srv2w^K7&(T%%?MyYc2BiK5~anK{J7` z)^ImuJzkVN#S-vwmx3r1q#qQ;Nx}Z#oiZR8ncke^;>UkEM`8Op{TjLU5NCivPB7|e z%49S314OCikI>fC5b|||)!T$<=@G|UL#kz`VKc9T?VA@CdzY|CvsLSNV7I@oT?zy7 z ze6zUQF|P4wgVUeJ!hh`cVx6hOm|4A{AZpiw(?8<#h4B1B!bkVCCb*$LLC^gd&0OZeX3RqHM3jxbz*lK@ejTWWQr<)9SS2!7+-}93LrpGrVtoZyUCshGvrI$xx zN||qJbZ|9&5Y06Y3${@l+FxhRYmn}{a#BwsE!+%h$9DF`d;MAREh2PH0OP?G0u!e+ z&B0gA`l_tw;X_RW{3*5qb+g^!mkUk5C%6n+#F{Ba@syNU7nwWNoJfwBJa{GtNTJd% zN)pXv33Dms^ra9pT?EYRMu~Fz))as8W@>bP+l#`%r-@kZi-ArTS|FNw^n#BQH@>?I zP#r)0PIEwBr2e>(9h;EtSA=yI@Yi&|tevi?j={tmaxsPAA(|L<4RU#@_+ug}DL*c- 
z6GS!Ch$t2WR8LoA`+O{+LY4D+c<4m%&xb_E3`W5d9{I5Zj{f6fFtMV9va6!F&BqpK_{S;<=TsYhU?7S*mYLolU_t%13Hy zZ>r;qXs$+Y!>8zGLX(OW-^pUbQrX^mC;|mH*_K}gMyekgFUL!I(V70y0)!3PCqVM6 zRQJ%vuuabx2tZ-bEZ-p03A!^FnskLixtYKd*+`Tc_V_zVeI+x%Da}*-f=h^utDHXsBYsXHkGgrnx4E9xLZZ4>rDn>Wdlh& zRFHm9tXX*Nne}P?3BSQaRE)X49NVRtip1wfRAd0TS?bXxL z&4Ubkt%t~W#av_`*-snud^GD-sg6?VRjrZX@Rn;?^B|<%EcOTG1TN+;*0*A@lloAn z?O%$g+Wq;n*74{Wo&*JQH^X%4%oZu1^0ao}gPbvXdQB_imT}&r7IQ|wD+<-*17 z?w)VSUCXLvD>PNTS-eUoLxJ#|9;%os17AjvJ#@adQ=KB)(jmvd`CT%A10-WmxG)q7 z4u3xVn{Qtrk+p?EWX^NgrI=fB6j3-$ia{`$dJ>RTvgk%`(kPVIQGJDXm6`?Z zH*ENk=Iguxv?C?Q$5*OgNNHe%A6d6k`k=R|c&QdVzi2DCs6)7`9>UcEl2hD4jYk%a!qU!`W4# z%*MG_nBurz<%-;?D#kjl4+8kzo+IN-B|eZgl<{_~nl`}oGSo8AKUOrIYP zYt)hBT=UoK9*Vt~e#73y13#zp+l?LX?Kinnoflon5LlX#q;q_V#ekDYtQBEO`(Gd| z3DB-wPKt8pZdPU-!}pU$WIE1^5E>wf<9@y_dVEww`2%XD`_g9mR6^HTpP2s2XgoMQ z+|9%Jtu!>~%kB-7T3I5=_vP3dtCm*2OXIpLu4c=GEpcDf$_-3NK0TEz)RFIr{$A@D zcozHwXxxsLccZi%qu=3(uJ52nnK&fI!8<;fT~i^_)OxcI+AW<&fAo&3+6kU_WC!8; zF@BIMDV0KeKAkSP8$7bNSqkWxf#q&K8A0fanP3dFdubSwwnpWzFt|*~yG=!kRdKla z{=FM+WY}LGyf1J|yU--id3*5CKqWtAKZd<&-Fc0djD_nfANQHaNCG--8Q;*j2oepz zXF&@8l?i~K&C{xJIM=EB>XoetAI@vL9+E$F`6xY*CV3NMtaZx=`29+KIWFpXVh&Yi zBdaGyUEJm7Zx$msFM5xCW@2wAVXZU&MJlaCs-;Ud=+%Fv<%e7^M~6Jia-Ol5G4Oo` zo|?)2vxNih4rR8FLM;z^QCPsDF`jp*K^h^|8A-xxMWNNC_Q@1QJ4+pe9Jhjebi*5k zy!(=0K)1movbm`)J}pS;(*dW+gZZy%i$SybM&xbAL88X|>BikJFK}!m9&#i{OWDD& zhe!U^ew1#w!GpZpz15o5Uzb~ebq|10Byki0pJyQdZ#@1#uFyLdX424^C6JdF>v330 zN0pPg*u3jpTYK>ZKj5?ki#7>tfA)oih4OwpF20p`#-BO%Bd6Iy0AV?n&N}wM;G~u0 zpX>w9jh~({4`lD!>JF3xR1j*4&Vv8OGadHN& z$5o}kbB7FypO8*>$NNLFOZ1j=dIW=!9DUShKk}S_KJ(n84osz{o7KWFL^6-anE`yE4Y{IjzCnd!S0JpA;tb5s2CxG9XxbQT{O2V zt>+nfL|>Qq0W0iAsMp3O0Y-rOzeaFc73KCH8V~OhSkx8Cd2VH~L>=)4b_Qy}y&RH} z!f=hlFp1M_?F$w12-l+0OBZpMw4pNt0tI_KL$gb*Lo#*rtz>wVzS@YU=Z0;(Ac_*l z26}ZQi4;w|zpM;;t7axD0#^FFB8fcmo);(aT)ZNQD$P1wWAvennQ(Z}nebX8Hm9~g zuk1v~Z6MN?K{OSIXDV^Pnkr83++>y3--rwns9G*qZxF{2LyQ@aF4uMn`|D%H?Q;?4(9B5)B?uWH5a$#iLYlQu2oOnsE>&1-l 
ziWsXer!}>_BprWXt&y!)5TOYp_rM+7G5>XFUM=|Ht(pRXun0$O$VMxyl>l%Fi-H_R zDF+-dJTgKSngDMpltE&lP@Cr^vOF^4ySmUHd6!pWtoDB#f}E481e3a6$O<8ZW>jz= zVzZE*V~_3|k`*E%tpyeab9wEEB?Z_V%= zY*qIC7xsxT2+L%}9_z^RL`n^R4LQgyeh@B=M~SrV3kO0(G#+*gEg|$rmA-=k!F6HI zbiziIJxMPmuiH-%w}w&}yj?XP9i6dAF@~rBJDDX?Ewf=j>^8;c|E!G`e~W8E|My6P z=6Z*apr{q+?5}qWH4n0n!WT3#S8LJH38){?%c9rgdsjHM%pCdu{TTP(3cl*1Ah4xx zV_r_@4SVIyuz0-Vc~VB;=G`9hN;$n3aW@Zfl%W%`2Eavb@2JdGT66!-~+ZjuQFXPAMG@DWCIgX#7Ctfu?h<@^cxKfFv|rY$m}P zjuW41U4^*`;7AfNH zpV8L7x8efH;srCI%polFfNekrYPYw#^-urg4+a0$EMEE)1_lTj6xJ8u*`8>C5#_aQ z+<^5WKe4MI0i)GcJSG45V2&uLIs9d_k_KJ0d}4US|S4no1oS_ zumcaYQp;*~4s27-GLfMtG!Y4YV|iAbM91@?AU`S}r{EeAE#7Xt>On|!(KJO*$U1vK zZOb=!mS%a(vV2k+0E#01HxvZ|{Ww^P5gL-fwq-b*rLcZn$zgO7SbHS)peW>bAYTpz zYrPycSTGy<-6a_iCtaAb|)oB}5qeEyo zB3TdTvg<3gyJh+^ynd@B}qMTfH?il|S|GqExqnTQb#shvD^Z)FH z5fZU(&*RyMg{7Lq5HfiT%dN4I?Oryk*N_l|3k+0524rJ4aZ07)QsV+nOc{ z2%M4#4$KHfG6H&}YA0K{{R3#R2+YEX2e}1-9gdj@ryP|A;>dUl_Erf1{__79?i4HJ z+BFBSPK=~EeZ!loVUBj#2 zCImt|ja=A(vPFVjJwXc2mh%h(u@p8^;4kit2GC;xRATHcAwipWwgCR%5GD}`kvAiS zjurJi#{b1Mf54IJ<{CN8_LztT0XXPL-J_`G7zVc59CdRUxPV{D{Ri1zPa<&N(%O5P zuxkMQ`6hr{6qv2&80Gl@eB%g1-TyqH_R3}c49)4Cw#Q8p!Iv0YhY3c2HC$bo7b#U< z`EJm;=(IWS$+{Dr51@rn==wLL7=`sui0jwVMLc+&f?NXYf*oiw84)e@9dip5`wlA& zOUM`@lDmwJ*RT!kH=8xZ!Q79RMj)|Q8&D_;MS%8&qhb{! 
zw3u1ha`eF(&n3)Cx+t_$HWUjMveB}ZY}cFdGjMiAe~xad6@uT%xOMn7d_+S!Csm+E1xC_Dm{D!5uelUS?Cj}@S@$`YP;hO|B4k9Z( z`#v!(c!sV1Mag{}`p_{gM)NoXyU3hpU60ZR7;sctnCo}QRuVJy6Lx3gou&RG{;@#&Mphfv zDpJ>nxvwOas+QXjZ0$7gfcpm;m=}AzB?`Eq5D=`0>5vnWu9axwV$R2HFETa+)%CuRoanSui><4t%Y`)-)0 zQJY==uWx*fc;vs2Z?{poUi%!v-piz$N#SpDowB@ZEK>I6xEY`tPnl_uPQhjP)lt!^awyOPV1rKGvUGiQogqOZsI79Y zot(VWJzbEB7x*Wz&~V7%vc2aP2YLkZ|2qBeoR*!*=$9Y>yc&}IUysz(9~y~rQz6&x z=8M*ODe5q&*y=`GqKC`ONyA8vvBnFzH=qRqL2K7@}(*d&PzN`W{uUYD5h_C93Q{W<$HHQM5rVRHr`8NUUaT;P_m z2@f;V6B}HG3=mRhhqUbUfFRELPqMGZD?geNNk!cf$YVoMfmiY###M37%ZmcVBX#$= z0dOH&@_RMmci>dT7bGq%YzvcG`*@+3d>62V;aP|cot+nZfAu~Eq=Q%$N*3n5H8(FF zS||~_Go7Qf__-nLTnM~<9KP)U7iR<<2P~1D+O)zDrPaTmxP8v3ua?wbDUp+ia|S2V zn(1XwE(c=BNi%?r{q*bqcVjCBFD5D6tfKwVE4~<>5j~fbjrT+Br$!(7L2KXK=kkU! z?SlJ5nLfL3tJRvNaqP;M8lH0hhG0I_>qLlb#wH&0MS5A{F;*z?9!aEGe=rG6RPg0d zp2)nMU6pQYg-GHunWVu`;37sX6$!42pS9 zp5gB#799mE;#kpA!?lJvH;b9h8`TBR4Dv*O*^3jPx36QO5* z!m&}}xqyc#-)#Z{=$Q^o(K6C{X6gQ2FD(`!)r5x(O6@z9e-~~VqqEoKC0AK^l*`yd zKWA_Y`j}9%6Q97b#dM2#xrdMt#5Uw{NB0w-NBQg=DhnT$`(=L96mY)dBbi8DY4pvq3eiP%Hy9(uvefkJXMUv=|!BZk#8V^-6 zoIax_9C_->aO8`zLmc}n8IhN&z-LN6KBu*_x^MQCid{aad)wYZhSvL(oa~#RMS@k0 zp6TE-&4I7kd3R7u&_^TkgYj5?`Dv+Dtc)3O$iNRi8+)$OG+zZveY(k`{Rt`bx@&Zp z&Si?xb}HxzFE)ahFNVyYRGg!{fXD48_M@LzD?{r35`({&LI6R(+cY)}TH^a6a&SNz z99Ojv*D0tbcum(f@fC9RL=J^Yb|>jAjp_X-jdiwR!KPRGt<%<(mZ6Qkp{ zfobMHvK1bV-CSOq zPYYc`J2SR8&qO5KJi-B_GXF`0RV0)y5|hX^pWn=F$mhdbMIH#3#S0pSM3(Z`sP0Y(ZX??Wc4l z2~az_ej<`ve&p8W65@a@iGiXUn}#zKjGYK*bALU$(d;jx`^3(@RqZ*IqTO?QTZ7mQ z-Hc)m7HN*@kv}Nu0tGNQR~h@3i5T?-B>RtL)rJsrkw8t&`9c~x!V7Ro?3UbD1lUbe z?#dY%>m*IQBtVP-#I`Q=7&U)-tG^ombTCi#pLOJ`6WfX@-U^n8)+~EFA}(%%D9bNQ z(1cbStq*b~wn#qYZ0)p3&2s>PQTm5q(2$$m@rRzR>bF5ndd48gA$q{swI!scHQpd`wmA9<&}8C-ff*8!KUQKvw=%5wx*Kl5%bg6sQ!-}MttmT&4X|y%Bi@k z82o30XC?gG(}oS5lm%YD6h@>+)Sx~6o34k?6r&LSmsRXPEv-~pt(-FPaVHRIW zw#B$0uA2vlg;)OoU;328hYOWPW?nQ`t9@=kG-^(Q97mCxe!q98yj1=#-!?_~$8lgb z**F}x)sAUbEyZ^DE^Zz7`8fDxq2Rw1xgv|!o^xID>uWL^UoTBEM=NqRdpL3Ecy9(g 
zDXTB!wBhDW3qg&=>jdK`qZWlKlP4~nb6P>-_G#h=XANX({>+fV zJLjF;S8@6SkN3&hUTl`^6>1^~ZXj(pC2MDX_+~dLARqoJpvNh#y*7lgIZ_qkR9jD; zyf^fGjTszDVJ}Pb*j?F1N0$!lWTYGz6?Aa7LK34kbl_m3jX@rB}P6dz2 z7Fq^IWBfNj;~+Bsv9^AbN?nP!9)1&23f!kd>XZ5~(A86K$@+A@x1v?|Kr!ruD zli~V$hA{fcxcuD`SYb$w{s=b^PREJ7pPt_@u-VbHH(piYjIajIP3-UThi-LL`C7xb z<{Ia0;S#8H1UqU59K^0-y)SA!ZffXVc=pGgx|7SXj#w$tJGEaKf~X7$q_tMAyswWe z0}_?=f8LAqSI6}QD&Uj!F#lwy_NV)6@GIBf16^PCl`~&tZL2>_CP1@O&Dt8jV$N@r z!8yw--y_g+G9JjwFe(3(yyY6zI(Cu=gJMyveS6D3CTMG77!!92sSz)dHKx_5Wl^M_ z)28Mrf0vt}6a`Ji2Jeas4Tg=diy2XWdK{tVb~G~?Mw%@F5Z*8adF+{+n3z=D)rRJ$ zAavOeu^X2>le2R8_OeO7BceMhqWDJ;ab5#}gpjr|lJ+P_T7x8CAW9j?8hMh=h%YBi^2o#O1NProQ+ur98<$Gl+;2AR} zmxNta&f5etm)Xx=bY`LZk^w{!8cYlsE_6l5c?Oc6wNrRc)vB;vb@2UV^!-^!>eX%QtP+xnWz(uPyi1!lvcexoO&@-LA>0tR2pHlEROe4*t5`AH)u+V25F zh=1@`C=)v`L3eDfvCP$(!E1ZT;_LJUpIqK&^FEDDdI6?y(mX3s2<=^&cS}~EK6G-y z_)rstW>4c5fn{x@8X6(^Tt~QfOIB9e)vZ;;VpEtnuZXeRz~CHvGz&d))muGlY+87KAi8xbw21Z|3O9>%Any`H%{iK| z+|*zK$n$H^OU zYe+Y~$#o-JXc4>&m+A5&9kW(BTE*D?*OltnTBmqGGiB#iu|(8ELPo#a)0FvQg=TVX2FCyE&)Wjj>Sd$8%~!PX0QvLw-S;HV-UzCzSTfyQK?nFC zW7&U-_gb~s2#|eg=s3HZg?re`;t?|{;69Hs!&j`VVn8hBr!+7Z8;Ua@1I+eI z+8OSSN5>?5c#pEqHA>EY)e3Mw^vzMGO-TgCl5M*$dmBp)K`tzBG(ewRFl+fKxX4zZ{c^PEy6!xT4btoE6O+q&oz#mEydUN%J%0nB5v z^nRL3A9wABRGn-OGE(mB{;F5M@?Cq|ApRF!>-YVNnO$(|oy~{LyrW6{?12I#5Yml6kQ)@qQ_HTVjxu^@q4QlIz##i;74F;DI?R1HByySo2$z%c zQ?dOS#f(2gw7X5>3G8swcOrwlI$v3F;TGu}!JaSag)?|L*MPsa5uNKeQ3Zk~k(z-) z-{^$($9U=%pAjt$*~MKgqhGk{Tz=ar)t^xBXz&b}4j`M7D5!Mh7O#L$BRhKusSj!to;gW|bFqu!J}2=U2i~S&)a5q+j>W{`$M( zv!J0W4ZnkUcm>OrpI*&lc>vFVV|Lzh+%m%$$`j-}cGYuQ3sh0?dwt*@{u)KgKo&sL`uZ z2%U!4uh!>Y_RBFv3!D2PUR@{}Uj|-*M!0h3*TN_ipXPyz@2xz9tMF>ymnFgjSqJPZ z_e}X$+OaWhirgzM)KsHTfLsG~{y0DKu~(5`5Y#TUEu`f}ucy4&2t8~26$m^QKf#)= zmMbk?P6U5U-~K$(AQZDc9DtF)`%cn;FR%##tOO(f8dqgv9?v%`!rjbK0LWtUNs}lI zjw<>tGUocvQ7C)~?e6n^Gu^drV#pa>ODEe^d%e09$!H~(hG|aCSZE0hfLq$SIgxd6@lrE+x-E2};h7<6HVy)~Y|?*o*}y&5rJK|X`}Mh> zGG@4hNd%>X-4A)Vlg(Ew&>5Dh)k%eg7?xtv>-027-t4EdamEBoRN_Kd^9@hXH$CGm 
z^X-S-p#gRMqCR`&b;goNnZWx_p8DM|n8KgjAM);8_Ac=7Zc?r@!GzrOS2#e^bhFiw za;eY_Tz%4~wiC@7W{q?z{_PBDzv^C8wACn;fHzJpHkWtb85<<4*5Owk*wy*gApHn5 zNSjY_asFQO#gF;YaMVdB+Tcg{$K(2fenl8ZQ0GRWJm;S)N%e_8)T$aEgTYn`3-^>m zmg{MFPzula)_g?1K3tNXifpxpS1U2$AfS3jM2-M)=b?+`meD-kdd2xbl-N>J_@V7E zcV&^}LKQuS$%)?b$fA{1j+O}0?}WaU^crs^Juns$P}e46-m!oqkZy&7UZ34rtG-38 zf4?X`$f$kVFAc(O{a@BU#k-g$Pprx)FL za>N18hZK^S@kX!n7&gW_G4h*Zo$bCKpB}f$s4xkY^YYFt5AYd|7=`c!T*Pn-RV||v z_2QlSFh_!)dLEl;X)=&UxtXmyi5h#N0o4fFX9eKge`0uIbKa!}C{HJVCmBBY?0Lu( z)a-q4Kx#H@<1-j)HIki2UObRjs83X-{~`Mjv#eD3$c;&+k$VIipn^m3od4l~G<}>Q zn@A8`fX?OfMUKB1Ho;QreI9QC5}ujXooR>k2VzLG;gdPe4Mo4E&ECsiZi_>3Z~n_= z`&r{#GkYgB)UdhelUE7|{mx-9qs8T4$r%$wuS{!x#^mok^;7uU>aDtZ^u<`rWBrTYWgMhpyuuQ#CA?mn zmv*VBwVTMPEH8tC2@@qIALaiQ$9Fm%3kfKHiB3i3MM(~z`jlNI&p4Kh*;7@59O?6 z8GYXPP!@U9Mpy9eL)nC+-E3=oj;bY#bfpZ?v?`0=>8NNs9~mM@^e?fRH|3$Am! zXk6#OC6=G3mRtDU%6N;C>%0mVCu+#;+SF5P>}T>4Fnqn=a>Z`JL|B-MIBUvf_R(I= z;Z5p@5%^C<_JtI9si1X_$+&0@7Z0w2?B-`Mcl;7=|Mc23df9E5lO_p0K@HB48Bwtuj3S8v+MdV)G%z!K zb?e?&sZA(HQT+hoJ?=71Rgu(4m(<(*W?HRz5?7 z$zap}vm`~3KNF{ODLS8{rF*3}pc~=TO8FRYj5Z```}jm~+Bp&0Ii}32tX;4joq3ECZx>(5lbg_(}Z0`~8y{q%Q;e0^@Yk#zGf1|Yc; z+gaCBz_Xo)q0axy%q+Pb_VQDsn|*5SSmd*{&Lon4NTf%5D#T^1H1F)Dk#Qk7cZS8$ z)!sTESlnlXx-Mk|tm)orWn+UPhsMxHW%71|!~`97ZtHy0RmFmeqiSH_fA1B?!Ur-> zH;Xbw{aKIwNNDmwo?mau|j z#&tl7bM5A-AB+#SIfd}10!rczLRbbHz?dc}*X9nx1A-$Hh)?JC0N@rGsZX<@?|()a zf$I+~gvk|wD7>4HqZF7Zm~wLaVm71dh2McKmKuBnVr#AsOaW~Nyag(g{{$-COkqJB z-+swq9H|dJkgdN@Vue!9Q;SXZB^5v}G9y2y(OTh*EB1oM*hswHrSvK^tNsL2T5fOS z>biTkP_}VP<;D9xRcZyj9V?Et4jt0tX!xjr`)RavC4s|t*CPXD!{DUa?yo#+$g~^- zZ<>#{BUll?)iMaEC)VG_EvUgagu>a2jC{J|ULdCR?I=g^%dUZRrHx+l5elH=nPlB; zA4reOdEcIB?a-U7Uc zMNf|VqXT5!JgQeL?(8LXei8Wv-iHg41yNSCa3CrG6jx?yJJrTF7hU@gQ2i%+ZH#+VIZ84^LF~H;XylBBV`>6s zjs*^o_c;BED3Hqm*84E3*|z|_?JHz?=1tHvE@-hXh{|ZXGeE*3X|4%5@1ZvU)v3KT zuooD)*U(%8ebLU65CPH@Z@yJmYkXmgXv9y*oydoo&V^#X;Vc z*~_0BQRuak?q!s&)&NaOXSHCP2S~E9cp;C4G=O;FD4ueH6X<_@op%#kZeVv1?Rw^z zd-6LOUZITsm;FNVFhVdpHg5A1X7xkZ=|$Vu<;bI&PY0u(Wz50Nhna=dKnp6j?D{r- 
zK5!gLR7^(SV*i?!LoQoWoZTBBH4XkDHQ!Y5j#aI|%7ptBI^hG&OC8O~ZZvb%mY(!L96oHJYKf))KHX&i2;uR-<%9nX z;g&!MHvvNUFZgY?X1kYGNIO3>mH&^bs}9RLUD|X=NlGIj-Cfe%4N8~bOSg1)D=m!( zDAL^xf|PWZgmg>uJ@4+>bN0Kg{byZR=c$={X6}1_8w1qbncj7)I#?XsDdjJr1>232 z&lBUHlG6MRlZZcKQPQzL0k`~fm^QdFdOTHqS-KbSqn)izJ80yu@JSpTq%R6=)@fJ# z&p}DnKi?_P(c<%Skcuo+7!o#R7QbyI`m!&z%`e1V^%69JY;i5U8>xUFlKF4R7Rb~R zhC+=`#w!tV1~SE1U2Z?;Fzt+rL6kmzS&JH?=jM!uKzIOC<#6HH=4ujVUh#|5_rX>t6Wmc=h>A7(ClZVwNn zHfoB7!J9AqVUoX2civyhCg#Ym!5>*!sYsn|GF5ZEtCN7EMv-zi6eB+yC@~N#m>fXV zu1iOQhD5FZ=UIT7l7dYCdEgIJu%D0jRWbfAMQ=RI%wxSysPLIoH++fHW!?8PoGU0U zY#u4_RszwOVb4`x!h%NSuy(4aU8)9kdBfZ*a#VyYARIw^(6=J0 zaN(;xf;Gl2NZjEs84BL>iAYTu8xi{`y9te5PPt&0F~r}M9!}G(%}!<1lzDvjIrlrY z-f_E12*3xRQ;#yLmjfBRK>lqhNKK7Jj-E*{pc>fqK z>PtsXF!wFb^*rCz@UC$9yV#%1HWW~{)j8*Q4s!;$g6#h2(UwTi>pK!Td3AG9Z=$M5 z!rHvUo!N9w*kCZt1Laeq4$K1=(h9tZ1n?&5*aKGf;SwEo{#q6O{t&Z`1R`RQ$8`2z zKX|<@ka~h@7rl)`hZn+3E;^5=*dl&Zn4KA`U5<@Ep^)FF|d^i>CHvp>t4S73vJ6e{D6Q*fC4acyT& za5k-cja{s+bGaHM+pPWL;ln|n+J$O5Buh*K zpJ(_yxi-4u5n#n!kwS=hzhJo3@$%0+m07R^wGiqjP`mWoj+clnap#*?xuWc{ckwj^ zX#@R!sjUh4yO1z3!)c)r5QTziJ3?%@b+oJ2(_>oy$j}eZOI3t%@1RN?XMheo8lRtU zjH#fwcL!D=nYc2~?c#h*w9OxF*~>%mmn_$3nCD7y~p|%&5&=R;q zTHp@XQ3)A+A==>B}#+p zi+G|PVIshd*aaGk=I>kmxR>7+GUaY8j(mmL}$5$80Nw<^u*Di2NZEBZElL-CSLDw3YzY zg&bEpv!YlN6Fam}IJXta+vkR)`%#hv{Q%)L78`C_6I-CRvl46%5vHP;89wy zp!;vE70;ft8{G1JS`^EDLogqI&-vtqS|5 zaHCV+ZAZWkTVZ(~ZpM27J`wiM(K0{ynTKPK*{k)X#Gw&zb^m%Sf2rts zUl;og!~QwkS%>TsrQyiTbir`z^Lo*@@un?E6Syc#-w*M1EVtU6M=IhpVciS+xSc&^ zEs1B+eSB}^A^Wo_yLTn5-Mx%5Km$TZg-$Ak31ZYQ7{-s>2moJ5h6Dx^h6doFi&ry% z3d7`bm|5;qS!N zSx>RW!2oZ`mazy7hS^W~e?F{W2u_qni$=R=r0_O1y&f?dd0wn zyxbLvpwV&y6NLTu2+mSsa9@T_A*U6^9jo z;h{kP5ry;wAK#s?-X;bu7EwuD?2ntarMv4Tk}`)8X4N~?9UF2VPCB3kMiNqCODf}G*{B<@Y7@0|ZeW4M3dXVlvC4_3-J&tcDStdWlK0<_x4&ZQj{>pcsG z6Q8{gAcpg44ftwyoL0gSzF~))-HN?q6H6epb=I*sfhGWdsSHOW_Md;ksK-vrqEh`y z!rWKSls55DhuUm{m+TtyoW=h0GYNqC)yE&1cQn~>B3j-Rh5JlKAr4I6Mhi=SO{1m; z$Jz|HW$GundKHe|C~)Dxd0qBvB~pp1XK^Hicu(&Bz){!(x2szBIrptrH|+SMGyk>6 
zf}_(~Ko9DyB~cU=CEoN?jYdt$OnQ9)Odpnb6=?Y5Y_Vq_v%~lvb1^`jxf%aM;6_<8 z18)^##sOYT00JszGrH)^-V*|NcYqLmhl5qOz!U2;Eu!FYo3D~?c<%wR$hIs^*Mw{F zcwY4G1Ml*=F5f3SkE-225LebMC&YBA$i^&Gp6PMaoz&2;ah#G}1HDh*Ug@7ilH_v*i3zVNu2Bw|9Hbd2q&Y!CqO!qMSlePeJKM72GpA3&E<)6{r527*eR|Vav0Wpqf(eCi?26== zWenOl200QY8#2_lz$7aZ2LYcz+0`L#kq!h5lyd=|x*=);)I(&I0yy+Az+b|NS)~$z z|x!|_R-DQ|VZeAJV>B>KF>_5i+3J96!!JKWz zG{6&4fM>}$6|X(}sPg+XHrdMFN+IqdMhVP@ki_&>pKylX4_r!Quz63G-FKv6ODF)# zEX=EbfrdP@s;Sw612J;Sby0tsH@HvX3)JGr2&>1Ov=y6_L z0qh4P;*bp@pq&WQvj=M9f%2l6XfgJXri{S`;v`}QIE%_m>v+t6$*( z50vxMOnK{uLiS;$?%3D zE(MeHUAZ4!2f;=KLQ}H8Mgr*>W(GXS9_v%I2N`?VbsSs_Im-OzK9QM|c#V(*i<;~( zzXpjUmoHo?7_ViA8BWpRVipFchNRgQg;4c^C-a^}ZQ{m-X7RM-5ra__+t6)B51V4! z;&12_CmqgfVBPaM4Gqd=8An*J0nA?7Ky4sQ{zm+2Z#WuAVLk(M{*fgG>fax&_1`WN z+eVm4W-{orBY`b7?+GjzU=6gD$MDrE3#?afeN1(NVz=fKGn*0s)aA196+g>%)T#!y zN=g(`v9R*CXs9OgdaIxy zm{KO^zXjmY%#IN?HkIXDzN;EF32KZel5!;x3j4@y3QLtV9yB2CD;UEzd2D>1U{{hC zuf9^bcG}iSUF3|-UYGu#ils>u0x22iZng()(xCw<;bNuw*%$b-my$oJ)e7>@W;=4n zBD67ez8q&Jr9;i)FkyZp2sO)afPWPa9HS;euNfln{gec7|K2Sxe-hUDM`ol+%E~;n z*nSe#ZddQS!b`K4mAt6GpjE1e^y>aa+=0-^f&1b)c+{A`^*_uUBCu9p(Pclv$p!pm z%d@ymsEC+4%z7qubf;#K8rG(7{;S}7z$e}a0r$IuRToQT{?5XpzYn1hA)Yrois(|odhGe{9F*&X0C|uurg1xxUZw%3 zl=fWb1&HWVpbKYoNcQB*iet<|Ub}iI9vtbxX@h0>v$6QXT>;@V9kN#TLYesiy-peg zPQ$CIu7IQ+5@k*s%xXdui;RWdjxZN)0&sUL-~CVVR)T#sda3KAeoe_g_$o(D{E_-< zayjcNV|#_a_azbQ_b?$Z-p=w+YUhoVCS1pmYS!8~y(k29sFp{pb5*Sh2+^o*=;snT z0bn6yeojlOGJ$I^H}2gLniE)J+Tellt?3OT_j#Zi1SLEegD^>q7OmazqovPXNX)fv z_}Z5VbU`l6ud}EY?-zPUV$RsmfqfjHxbmOZcXNgCFm%TFFxgDRO5f0U$lvnp*hIGC zS@prx)tf2PhT(SXc6;Avlq}aD2fgM(FEm9k?kU_wXiR89mh>iypg)5VIBYHX%FH=n zU>b{AAIVXV>=J75lVnJOWWc`ay>Dqd2l^-~EVsyS;!?gl@;Fy3m6AkOjuZ-c6RTq6 zR$#Xv1RarXDb(1|+Q%^-T^W%Sza!81Nw>CQrD<`lGdnii?=9ZSM6o>ZH$)`S>uC^=bGqEXs_lC>9ocIFc=VeE$a({p%EtPNx ziw=;d(OzdumlJmsfXZ56!8Cio&sSGutlVa}6{$9QpzK8c#3U`v`MEmW?_a#>iDH!C zjzx_B&t6$N_Utja@FkKDY&}x?Zw0mWQV^biVH5?cl&dh#V&I^W)#g4?LJyh>{WEz; ztD?-XA|^fjxFJ!XAbxhC-_A4tzoZYew`pg4StJridE%2Ty8$+;r|^$LBG>^AW?p8X 
z?mzifd#tlRM8;)+gh;m0qQ5}751XWf^$xQJ?&xli0NQtS(y`$`ZDO|ol$J3A^X(G^ zjbS@y>OcvoS~42y1^*tEAtD%9)GBn`TQ}7eLC{EioZ6S%bx;9|Gr$K9-3Qmo{~A2I zM6AL6f~R1+pNKC7`9cD|sRq44?kdYOw_6m#*J>BJREfR(aEFb(emqcYxS`aL1Ds4( z^$#qT;a*f+0|5UQYKJkS~bXDe@D(M{ck;(-lkmup%9!H@#&#(Y$_$+oT)r`do) zD0t!OdJp?A`JgDIJNX+yHgHI5<|Y@|JNaF6g08~a6&j(fvNFX6lA~*|S9|bdc=`yE zQnsb;5`xFWr%It=%LIjCQ(R1ZXWlhO)IQbsg`Z;m3)j^QjH}uC$cn}xO=#`S*@>0g zy*o!HTGk=zJRdwDp8_+b&!nC&Esji7%wnL@fC%zb@yUPPl&>w^(ba-eUt1AZmusmD zT(-5~%Yvp-DL81q#59E75&fT}d6XX59fN~v6E7*MGq}t`IhNaH?BAhhO$=GSA^T+l z_=-ko%H*`X02K_yE4f?u$Z0_Igo>O6n)`1=y7*)wV#~EyJ1(YVx2qPC7Dg=Abf1xU z&kW!ZGibi)lz2KoQ;)`>aw!j9%fF@P@ye^u43Ab*4UcH>Uhmj;d={g-sa;LO*w1n& z!u}BO7V1WMT}iJ9fpux*|8b+lK1NMrdXGX4uk(*m;d!c81uhrHF!wd4J!}!aPSya` zvxtoXM(}n(XE>ivf0#NjHo>y#SZEBXVGkBx{HhkSp%4`*9xhzh7J+m4z-*)E8hTn` zdg%Y7HU0S1HwqCA#0ny7__`gePE!2AaF+eTjEWXk6DOgIk~{foY@dy5^Gd)PhY%n$ zB$w|0v|iyy_c|~!W*)BCU=Y7p$@KOG78FZafmo)HZ~-bYFUBk*sEfj6CBX(sKBWky z2vfD`r>q<8ibX=oKtPuiqp8Q>LdE<<4|PVN>Hm$&6d%FfAnS0866so3eM2u-7NMZj z?!~ubTMZ3tl_zQZdijCB*5?V>v)?Fk5W?C$KiKe<;QQ#dU`P4Z-&-B9AD~9&&Z6ka zLt6Hc-^>Sw4=8+J~7VLtrVIJ$6drd`H*4Gv1BBmSJ~DVxpnEcuipBu%2UXo89#HL3lWVBLt+~_ zUX(NCdEh=SK*E!44~qPPbyCDw%{F6gSAtQo`vq^J5ve+|h!NpN?OjwlN;NhbVKHp( ziSgvSIG>pWhd8*0aLB{jmuu4VI%f-%cvJ|Cg+^bB247ZSg$qGWW)D+Y}X-y=qdAV(d7BV$Aw%hF{AF*w{| zmR}t8F-%~Fk~bm~ri#uMC$&#=2p$C&onMZp0GnJg1 z2?f9@{)N8a|LaKp!t)hw%~)<`$xY66o_Qlfm3$fqASJRLQxxr)l@Q?)?PXg^c93@fb z-Jle!H6kEFG9T~l^I*j4#=p&x6|2XBv3(zdOk3JPXQ2Fwi1VL=A#6PU+7&NihecW( zr1*u*t%q#hdzS2>zqnk}mXLi`nwLf)mI|!_$X)@46#myL#4L(rZec+3)iG5{!#(R_ z{nXb*a-9s?o`DaHQjX8Hy%pW?I-MB}-%fLY!FiT+`V`JR(Hy!Qq#s*oGGI9y=hN)- zUF&*nuUR)L{V>`~6FC-Wm&x_z;yb3kv8^B73*j=du%*ZFzz?H9PSzPef|yC&ss~L7 z(2cJSbok7}1#937lrqAI2BmkV>hjQ$#ow=sTDcb=nbqGYby-->a!6itD}0LFg6&Lv z4NV$0B_MOCpuiRS@5a(oAwPu`2T={6dWkr*L~DHUR0bx4AWYdA%bnZ6_G)jdyE;OG zx?bI6?;2VjSDoKy%0Y->W*0yS}YGo23()1qcbxIB<7CP0Cp?b;ve(Qx1nFcFWtg2QV%$?2mOL84-xD_&HE-w)!Jz;oZT{H`d7Yr z2Qw&;m*yiyN*Pg+r_Q#z6w+#Y%JfWcc}dMCLcw6zG!YJFcCaoqgi5*3(9Fg9-?uJv 
za_r*-%pY<@c+IM_VW|`BDnvjN0`PK{zz^mAwc;=7dadx^gS{1b78S=m#B>=#cRjLj zDWGa;2qf4w4LLwm za9B`PD@i>K3z+`2Ys+dB=LMs@2*RjOAL=jpnO%8I;#!|LOSz>TjX(3-`$~FcwGbbQ zWXwbnNc8Xo;sg#LVfv@<+&{X&$$*wcBk!q!R>C>2&kKPIU-7lnZ7tAB z)P&g1kEHc9s-1sL9z*ks(B8@9QTtb+^*(k5BY7H5tJYR85AF2q1uNGk<|}l4$$jU? z67AEGzKRJn<(DbX zzYT6dQ9i920F;rtzR=c9D38OBzdH2}m%V9nEYe_0y^>X*$sc&m6% zYVdUUCc}2+g2TQG9V)+7qDxikdU=r(LY2PZ5E%B9H}o+efj8mjT^|#-?ll#u#^$@; zD?!v>&`B4|YSq-@&^?6t_Z~tA)eaDxs+!c_V7V3_^|~1_4EJ$wW_IRq)SpqL8eHsv z?uV8vBHYx3%T(yw`NBzd)kc_S-Ae>3Az34;XCP{-i8zTsdLAaf#?8S zS)jyCo&JS%1^$b4!2mnxw4FH#Qxx#_l7BKQX^P7CQU4Q?!{IIpWbIYz*Y(gSr zFq}1VGE`>x<0G(MPR>`p9Fv_!U@sNokz}C&x*{f}pGcqa0j%oj0fg!)<*n z*L3S$zv`W>`}1%&o7qS$$9?;)){fdrul+?jHzdZWMk6Eg?O@}pw~7uuwMQ&TcK(>q zf;{a))if&!BXl-uFTMlUDDa-lV1&Yd3qya+tFb`4rhEDX)}iG^fxxjK=F5Dt>LFD{ zLIf9~M<=2=^Y4eLtM-0mV_FChE4YjhlTADXrqB7#8#!wbUp|c&`=uf4N93i;wW~Ow z{f*6$hwUx?fh-ZLq?Z|Vcb??Tkc!*?HHMRG1qUws&)XY8jxC`8V$@?u$BZD3Wxv1n zAb&f}!$OznVH~-sn>UMx^)K2FF*A_E!my|e-$yhklm8b_A%bZkQe8(jJ^H;IweqMc zs^5(8-Q#{1awd)R+@t@}%eO9VOGD|dzk;c|DPO5SQmIR8At2xN4UazgH?VA&BUe~5M z^|0DJ*2=Q;_KI1;(%TaK2g!0ogu`oQ%$yTc+o;QpTxI|7#N%!l;cJ@OXVr(g5twj- zIx_yzD4^Snm5c3CGqI}u<Ph|20856BJ$;X~nT z?LZfi0`tq#9`BJ?3!zLeM#Y$RQy0 z(LEvdQB2KO9z=P`J)VfG?Od0io89>#stRy?u57)rON!O`XJ=uOC%?ZKR_$u;^oiG) zM?Y}i^xPzAs;`9x4m3<;mSu-{@wk+v*tc$>#@^J2h05Ha&Wy_TiwjpK#*z7FgV|oe z+G_;hnmVWFPbNTFE#F6X50{rQtbMKp>BlM2m5n0G?%8U*Q2Ki}`pP#nx=@wHI@kmJ zAM|@k9B3anm(;KfPLr)ARHxn76iQBdZPxaTn0NxS5pyJlqkc6P4}V1v#B9S`6ZE)0 zWkWRAvQohKD#sZ&fzn_INfXwU4zLPF+VaF_#ynowSNU2gG2CxoqCVj#GK~EgG3@0&#{1sKj^(cZi1NJBsudEQ zNF!;7Z<{%J=$Js=vzx}0j&{~ldRIf@@SN(#?1D4IPA%6o%8Q$L>VATAuqLqm-jKV+ zT{CDb;tvMTVPIS^>o7OoLZTRSk_cKS3&{R{ae`q9{b0cE`<&#ZVO-+aasvun;uEW4 zOUfn!g8JFjzVc-49zA#L2&uqZ^0DL2FDHDFepfFQFq*Hdh;=#gi@lBg`Dd&wJ?_Q`;3x&!EyH^6?0mj!a5l;Kh zE7lDW_xR{%8{|8;ST@jN`;b=G*)$=uiNUsJyv*0qKiQJz&MB67HMnpO!VLmDE;oz4 z&j?U@gX1I>{)EV6V*K+=#7frA(VhGnaVqp7*5e1(i$3^T)YpH~l(v{_80ozz@|R0? 
z(oV?;cSrt^OxiVL2X^vlhW*ZPMGeA7e#0pxUz^@^Smrci^C0BoU<}`;@BbVVwWc9^ zXeV~%pzrI`h%IC59)=9F?mN=?I;u;$?wMpGqn}5nF1~^Ij<7&77BZSw4X{rle%U8L z^>PMm`bcwt9oL{iXNH~8Si6JdDK6YdNF8GM9=g{to8R<)WDtS2}{C?Bg=nmie zl@}*@rzH)oza@_u(tYdP;7R)J?w-J7lt1FMUH%z&I<~|kwx_2G9~pP5Jy280)L8>z zdSDY2qkwyqLApcgdE-jNl*=TZWX_l|8T!`yTg(Hdsq2fHkJ|#cmT58MC$PLbkDTr8 z*to=8lkt5a;W!w5%S9ZoZC#J6 zhxa1c2(?FQ#-PxOW^6TyoJAp4Y6|yS(Q}L9z2xRJ)uj0-k**4={1`gYl>jEvPycW> zyiOpd-TZPGt~Lh*PmW7`_y-@#i5sH^@klPdiH13Z&*qNJMqV8_BKu8NB6!r3MsxP! z-gZjD+Z<2TZOam;KQ8+Iq}u^_uSIos7B;ks*D`=U2^$&pSdLcy#Z0mD+8a-6XiWXH3=>7FC;tAz(Ald;dqa zvR$;_TOzi(d!>!@cfXyi4_5Nm2Qr~V@Ff8!R=}?>(Ux%n(=0HyH$%9p!U7C6%zK|8 zq7g)d$a_iJs4rZSyra^0$@bYNX4Hi2^M5l%(vU&`TOnSto-o_YcLk#;dBFeef&)E@ z{_=unz4JR=uE9y)n6PLKA{(;4IUC7?KYlgGioSl*K*NcC(%a4at9-}lfM`$=8h8Pk zTYv9>evH-lo0gPGh{Gs;r6$1^*K=OeWWRVV+SilRW2NLc(WX(c9d#b;+?I)2O7*T= zedYR`+FoZq$CMaDDe;fwv075QHi6}aN+s&W5Ov9y#oo7ZiMKKRBLyzwy=qkAlPveX z`#oW5+^z@yut6-h{TV3^@5qC>;+QAT4LMi~!+PK1h~4}9qNs9?wcD(Fdoq(puKffl zo=GGg6Rgmuz7MK4@7AOdZx2}<2OlSJ$4b1gzBD~zs#;_G*!#q7{#5-NlEl}M9h!c% zsqr0qVLk@3tw@ZsdmSH`vydJ+(- z>__S94${*)o{o5xUZ}5TKIVXzWIva?#d`2mQJxT`!#oF$vQESs9 zNyPTU1}CFJcyNcl9l<;{T*iChs@#spR7Rr70vxxmr`fLY;TBfNi8_u`@-D87WA8l9 z$VeqNx8&-GKuQ#sR%9$b=Y^pb7$_i!{#fk5bM%xraC+@_*!xeMnyp3n%+F~s0BhZX z9T~ggHAMKNChD=RIIh$&rxo4j6!*KX&}Rv?>BlFK1$*+;xI7&YNb0dKlwF>RTPbg5 zsUeHZLE4%*UWI(5<9!gh?mkYNu}kUbj$Of#bB<(9tToJ!^jNE6;Q93)2sS_7rtPT)$p=uh9+3kJ-k9(@}`MLXEAo zS@2*B)D%g4XJ+1VfqY{uZn!YaUgJy%VG&Mbsr<~5$Fa$nHluoyp4Q=MH@dn4|9!@8 zdC`?dM4h9^;@D}U-a^CB8O_oz)nL%D*+c=OaZ(*J00xc^1gsF;zp9D6!q?hzS6N@G zBnpvR=<&I}!n;VTIf$x+RfXW0>on4SfU#)~aC?rhyK=g4;zHRgQ#J=0 zE;QS})z$0X>M5cBP!x_Fzmt1T%UBz^LsC6EA zlz{oq*GIJ(kkzxC*!(sTt|wcOvBmyRTx*dqfSddBC(r!UKSjo+7qxoR?(&5TQ_+Q!35o#jl8P$CGGE(qWXpU!kx6O&$>*R! 
zU>a4&@7qmf+Znq;rNo$uN8K^cYCuUp{+!sjm3;ZoP5uMQ%!o{XXR4&NP`kKsLwb{g z1)m(o*(JeV z@pU?0g#+M8MFA?;h{z1vM9^DEp|%wJ2z5_^U|9gZCcbEAx#6F4%{IS3i&B z!M#lX=8H3>@Yc7Q7BhsTohTKTq$viRQUz*sWUOwPVuN3lgro?aN-FV?F8#aJa^lNb z<<%b(T+L5j=rXcAqKjeBMn%hg!*145xP1GlhO!p+vQ5GrT?K!04!hHlr1ZCU8q6-v^M$SQ*A zKmt2mXJ<_`YX-&OV&IJdr(*HF+}O4Qa^DtWlsxE)9%aY=R3++78xu8+;5ej(e@)KY z+Bi^`VbqzocRZ7%z4!d_cJm)DK%#6JRXU7~eihR zd5P3L97~zZJnItFBAYMWpYfk@-}dw+ArF-XW>5{TGLtPK)B2ZH*oh&1o;1P0V}boz z7={WWs=ucmD9;bvhlDX7pYn-{9T;bmpia2Ws}s&UwAt!idSJua-!;*{%7o~Eb9n^@ z6vBZpoe-@0x645G^EG1;?B$e-h1L4P#V#*^7nT1!NZfWRMZ)vJzfa6oNhUTAl@aNYnZWgX=j~iR9Qo7q3UNOE3FT|^d+!!z`rv%@ z&JV=W={odO2zhBUJs4=GQOo`6Q`chqqmq{1rp3F~=-SMTBfUp@$z*_Z(9!oK_R2@$ zx<@9@MI+**@kAeGmz0YFtn)he2S+KiLoW-)p#GIbwref%?L2}-k>4i5A%32nWi=9S zuVg5>t{CHo-&A-@PQ)ANxG@t(Wg#c~)WmK(C9DCY9^?chsd8Df19r6g$x#8gg<+;J z?ah+iTP)v;yoAYD7e{}m3HEvkJe~cL_s}l1Qex`X>q<^65i|oOru>=p2x#-TP5q+MV z#z@FY_%zdK2+Hgq50`O*D70sNvBuA6V>L%2U~Gh_m27{(K<}nJILyEDLkHVG7&q-> zFLY?`oD6Kwqm^gvTizd6%Ur-HX-EG_Emk^q-r&Pku-%Y>D?Qk+*7?)tpgw&gqah3W zC$HwGx+68Z4Q%+~7O%slD%UO;ukNf0(s-x5@~}6U0#f`DIK3#}6@L#P&|3YiNWhjy zK=v>_>4117%p9|d7lqNh>*;C$I<#u!R_NGpc~#6_%N0oPg7$; zUWZMmw^!Tx6d^M5mYAMi*xSo)96csK=T*uZ_gj{In*MXO2G3E17aOE*$|gj0tKcLN zRblB8C_#A_T|9Oo=AXkHUqY5<(joE>d;0w&mXTIZnQCN2+b8UzxBYqIbE9494NQ1^ zeoM)0BfKtPwLe~^v*1$)h3Ptkl%EBEic8E$w-){N1cfERP_3>mGeEO2tZ~edu;DDe z+pOC?uQIr_TvPhMrY)`XDWlm@C%ZA8(RH>}Nb4HDwU7vcYI{)FHV&VC=(C+Arl8&W z7B!8A)V$wE5CC&h-{1AQA3fWv?7LD7nnlS)oe=If%X(nXr%D6gd+~byxmZ?T823*Is0(;I z_oly*1y7WWt-97s3M7dbsAkj}TxcNMQ^`wwsiN=2{|b_Zh8d3d;2MwIPhmCdAwiA* zN>eUIFhsphJ-XFFlD=bgk^a1+qH+ugKdGEdB-vCUq0>`P>GztAu6dUvhdWZySVdt- zKZZg{qVe*U_2;qTl?VSybJZrD=#lR$GLuaHFTN7k^?!;r9(pIl(D__~1e#*A4m`Zgh4DfsOmTzQ~WkSvq> z8{o;EDo*)|0dKs~nKnz`P4nX{6A|AJJ(9PS;}GcSlm`RT@{`NP*sq=b-`StYs_V5AspP6VpqS&=h+>+A;p`)l5wSm6B~1EPFHc%iaNBYoZtm0 zZ-}-6UEdKnqibbx%Nb+!sr%#O+^=lGKiyDbu4f8d>Nl`4_s!-k7`@5GwYPl*dYMvi z!Pp6b!kjDvu>g{GRymxaG+fweca_=M5*TP@Cl}QJ0JKZ^)qi-3&~U;unt+`tA5eSn zqs39j8TtcPmGv$l$o~@Mj)SJh 
zkfHJamj01QQq!7ScrTb(-E75#+kb6)!xp>;tL^aWA!tXzesOl_8tlmFM{48n>&8B; z7oqwQlSI}+w0J^|rZc#NdTZ=8xj|d}G|Hs|T8fJr=GQma4!oB{Df%m5ilPlV^dmdNE z$pMr#f~?)=3I{y&omJ<`=lq9jsY>h;p0PH5gtg&EAD>QbwV1a224@KOOYkEzII=&m z!0vB_bIE=e1!w+{?$WaeuHCObmgT=wSy1tHPo)DEMG}3+A;u#)bxVy$DLwE}+akP% z3OXFt3%Qm_@(yOF^rID~bQ7e31`t8$Zb6${cpDTRvw_AIL)COkqqIN+f@kxlk=6dL z3MncX082eCeV;v0PaOgz9)Pnkb;up9_*qUEjz*OZda|uWU?XHRmXU6BJrnN1XQ53AT+GQ{I!8=dFjYtf#N(YWO0x%G-Zmo{FLf>YA z69!!?3D^_G`3=!fOUc^S8;+8L`S+ho9y}?i{ru zcF`;JZgmKrBtc`C7IT1@#s|zi<4+zW3Eawv-l)z1fV7AM-KKYEsl3f2;GuK+Ukb zu3JmcEs{>2#K~&Z{QdFu?h?x@z;1d$$c6dySR#Kc!UKy z%?5ugxOZ78*dzUn;L<}2)JxFUkMeK#uvmA}yz~xHKXzM_wMG6VjOHZPcS$0;AWwX4 zljyL#x4>8O4iF%@>)Yu!xP~!80ljqQA{?{b^o=MLr;K$>R$utw=k*z7*}T^mYfN^d z8*70)sLJ*bVPBs&~ zkc}=gkp}>lX-5mZTWE!juud=LJ{CPc1O(|m~efSu1PnK7qz0FZHXUdeQaf% zXd&R^adC}}!o+BX?c*LhuY6jSguNQig5zG1@;FNT5S(e_3 z?z|3K@Y1nBH(83QK8YPkJrR5S5+cIKM;587_2WA~CNP0<2>z-bf};AUnoM;a++ zuZOu#qJrg{ac9_ywi)?ryjA8b&Kw-$^F=sOXhx32cE-&$LE)E zu|qbOy+qnV#x&V}lPCJepTf{zUb;ccqI6c_E*~g?vnTS}-?IR>82Se!;MGW$kwXKS zI^=ItdK*rfso;D{x;qk{Hsrr9oK0ZE)s>;|n|HUQ;(x0ME|gwW^`~HrC=`|DQyo-{{E4mT*fSNwhAYx4K887F>d2sX6mO1OrRQEL z)^QS4wH6TnD_Bqwl9j-2G4frePVWY8JF?GBa zQd_875R)RL=8i*E>NOnhf6WL0NL0(L!aL6-;uo2BRK|jQe$m36FIt4B-BIaQEENF) zQ3M}*`JaylxAl>j`}m$b(fMR6%=k;raZi}fw+5CV5}O6P7=e{Lz#|eE8iqW?GDPZyIu1bX%?+Z zJ;CryV2xg#oOdUZQJFCX>mW@@z)2g?s|3}4Ws0uK%?TDmF!$b_zg8R(WAu9MH^oC& zm|8$wFNg#lMz|@4QgwAu0(t>oVi_C%~}s;ypS?eZ-G_%`&*$1@V^OuuP&j98cDCRa*42vUi->Vf;tYUI(>mI z0}=l#_f?(EaeawAS$;Cyf-3R7PUShrT$dksmzP{yD52n)taLeU7heKcwlJDTtp`Q? zabU{D|1Lr#Njn>2V01Gxe?IJL9xXnX@bB_0FCk_#HsY1xk8R(C-DC!kVd=;?_ar8ZCi*J!>HjjZPRHB`+D?Vwm!+o#uSTGSuDGk1Y7a$b$1HYFb931 zOx)`1@Cn$6y(>JYAORdu3P6cl`il{P2^bOo&k`moxa}JKGqmebh&HJvZ)Nd2A#{V; zyzXnPCfKW|G0dStFbO{p_(3=9`wj|*};I%n+FQL{|R@Rd7 z^_BvK)YxkivaMwB-;JDHDult4bs#r&DQP-IQ=f<3r>W?Z^HLOFy0>mXkd%%@BEsS`@LV?sT?9cmG4|{lfgLqK@oSBM#s&% zKkGTA9h$#7jTB19OS3nKx|H>LLB*t7paIwx=Izp-t_oVv)ak0d5G=@Y=BlL9lTS;? 
zAsqY*853otuWSHb)9X%D&y)oZY2%j^41^xN`v~zzJIqx>9LRj!7?|czL5wcU)-XqI zH(`&NXm{lqc|4bOI&}(3G){4ToK4r8p~&Q(1Rjs4j7iDY zPHV}0W+OkoU#;OAN7Moz$0T-{Lbuu%yK;++V1z+btbw2TRL#-aA0Ec8N)+B6rsV+ z3@Zd1&)oan1QXh+&P10+-ZeU*tg!T{G&SPGx0e%f+s&O!xznQOQD{SmA`mOFy94>n z$*@~Y9%%gV zIw==;VAb}cm8ayhm)x}}77n=L4iGz;31841Xs=(&79a5!hLbj4_$U-mg3&!>yzpeR zFFau2_?FYzj~jno_ZcjsaX=DRw=nqJ#rz1oB&Mlo;MKO(tt{gRcGPRg;@Pq0Y(pW7 z!s*rgFFbdGpT`Y*8WWht{^Lt zj`l+7^A0X*+UL3QKO>7cRS02g#_mgcb*Wk!A!}my%*k2t-P^75_TdXl>&@+OoU)*G z1265yXUVWwH%QbwSTfNri2`Mis%!gLCrtD610wN9@?Xi+^DVAie54sRLu2#3E4Qa` znZdte1Fr2ZKfe~Ob3;7ieItVW#dlTUO1DgM+mnu((^Ah?>0yK{K;+?f!zy`ouG>Xd zbFE8P$t9UT%uW0lFfIvs2SB{A5=UR7|^s7tZj402Vp)hhK}blBG&=7Fzmx)=jw%~$_VB*tz>Rm z`A&)Vu|QHQk56)v`j%uk18>G2`{tJgIw{_oZCeDLvT)A8D|UDUkDSPn>i7?xqiwu6 z^jq_F*d&tA0$|E>-h&aYuwpZAgkc@uw|&r_`CV>Y4R9cL?~%22ZTREIEd?n3r4>Dn0+y|ghzWHbR($e|zuhQ`t)PxSdOnLO^l;=ze7kQ$6J%Uw zN1DLq`WFv_ah$>O59!PDdJ(ivHlQGG*2?AMcVSY^2bNVUUYicbLdSa_S;f?j50fvXkC@M@velSe*mQ18 z_a6v5+uct{!YeU9ALmFd;PZ<`M%t?ub0|jbxHAvoS82_azrv+Ur3I6J9#qu;eS$yA ztDVj1tF~re`x%JA@)Ja0+pouTX6kMyk^FPJiFo|)O1Se^;-poKQHQf)TXC@@?kKZE z`B_JjRn!puVlE_q1}vsw+s#Q*N}FmYV)&oeB#5$2)XC*NQIvMCss%sBq-gzNL6EW*p2*VZoj)vUpfw_mfpJXKJ&3JB1O_%lPI#Zg`G}zM!PyDmxJfH zlPbzMBeK57lfdg_UliMMY-{c@C(q*TZa|Jaa~qBv0qz}}6NUBSuRLS5ylHF_;{eJC zQcvYBVzmr!f`4sl3XlI_a&yFNL`092sk8Y0E`y3?Hzn=o{FM}_ID6-P$#?mC%U&mTo}Xpocj=xSz4@WZd~g< z+YgfVejtmNz%cA-VZT}z@*R?}yLBpPkq5`&yrYfh2e6JpwlQetOIQ@fTNEP(VvQUH zQ4*tR{bh^?0N%+-u6#Zyr}|nqrD*K)(n&9gvgOCEjU?z?z9DQGalsZj+zyljO|;U< zFErPT3nikUl{Oz~k2f z8k$)-oHa{3mh|1Piq}S`F;of;xW>B%WCSE9eJChjS={@hAb21JFlr1%t0#YQ(eMFR zPs;oX#dZ?FzVURN>|Gm?>PaMVui)v7;9-mGZf7ec=vPoeS+p#>``CNJJ=WYqEo!FO z1WgDc197eAZfo}H{NMs8ImgUkc1BG7zrN|Xu!%W7T!8x-s1ptj+#qQG_pb;8_}0HL z%Hk^Znxo6EU6hT62_r_8MVRrs4l=doCQZkF6d8VB3j%>|d}%VetpG#H8NR|3G}{32 zFbW(?VMS4<#PpJ@toZA#+v7UAfVVsqW^4-YQ~%P=HpjwwrI4doV;3DecZ1PPj8se1UTEoFv`U%zF;8YqNOgxp zSMUWe5zasW@*hhsUP2?m7KuM@ke)1SO5P}=6^hM2#-wiY&Kvt#iL@Dcz9xTVZP}pd 
z&n61hvRAhs(yqQ7PwO2*c3OE5wy8zuoGj_5KN()VAn6q9I#uB`1lgv~7DuHQBlwI+ zb?}fI6og%ln=LAkci{%vNyMCjTnrt7@KRY;nuI`jU>A9*P&0y|3{l&T-RK-FtkRAF zM?xS**wIE5R^mf$kwQ58EOKsPAAnYKvgdA}(RPZ8_qn!WpZoZ?qIuiR$2?D!mT;Qa z7kNlqgdZ?DK7wrdzlXrU@(GFyJoz2vHeD#jUem)Y8U zQr#bC{Qh&*zVpIU++?w%Y6ll9?V-V!{idZf9^HO#Y6*n4F#WJJjK;}8B zL!VG$Tu*M$x3#!y|Bs~c7^ULxS9>fzdvr9P1;`6@(G~h*pf{gc&GFT|q~`X|90)!I zQi8DJgGiW1R>OMMx{3v>&9C)^)O6pc_;Z2!VZ?G1pv2d|AbkJ=dWBy_V&jWj5<0YF zD_u^Ia5CP|VvrIDX`0Q4M7|Q9S&@FRX>HL_qFRt)ExDvpe0WL2pWYUvIFogyDX-rP zV380lQpKEj8a%1^V&q|)LRE}?Y!cj&-rL*E9+|yVI+7G`*K3t0YY?j}^I2O#u3u}< z+^7qS`=;TCR*ONY4|p@)K42#u-!Sq%j)8)T7@U2M`G7XuEn!T<%&CuN58=9CBssK_ z2hc%RjTgeH%>A%csZdSj~`nl*Ttu)~quhkuwY+}3) z63w-~*>{I$l50v>F}c@wG8W<1deB1n4QN4)Sp&{(&NE4_I0y-F7yCpVp& zEG;)+j(F0Y3K3m!QxXbPokX-gtjv*Dm~ucq*79&tgmW-$$@b8jZi(655;zZ&HF2T< z3sFbf?$J0z71p>5#Ne}$myTuAbeu|JsaCnF?j~VdOo1rHc}nv$Un9peaPw~1rbuExY^6bXQ_apM z_6Dar+#C(_Oa+A=(hx2;SMftL9MYSwxY8v!TBIBeIlC4af1#cymKxNsYsJt5x(RDf z*e)vr3&1Q#R+I^%X(??9FB?a-KBjt`q-oy%rP!N36v(CNJ;ztnEn6S{m$qaSDX}6v zl*p&uy%TOGZA3{6?B&G^ga$4rKUFrZGwV;%^$4cZ5R?Hn)JPG%RQEBOy&|*S-b1dPk99# z;GxtOLt>fldof}=>}uxw);KH z4Z;RxGo(wUb0SlRaoc?m1L==JiIqVB#X~B_utEo@7+>jsP0a;iX_=jKADgu2Dd$i| zWo@QPD&Rgw69i?cp{d_8pJ*4%>LKRci9T$a&M5K07pI!!m_Jf%vavrJBZ?1Z;GQH6 z=yE;lszn1F#ZQ>m`7%Gi{WvHE_v36lAcypuhOsIm2dl*pv>oEs1eb;J3os5FFZMJS z=<`lTc#?YCx}}$)(qhy3UIjM_GUvV6lbse1$piq9pA^wnjZh3>cEGf!VtZWV#q)r!^jQP1{{)Vo{@@DVcDA>z5zxJ+f z+xALbDPt^hE2PFw&&>L$8DkVZp2~G49>(p~fzzDb_{bX3P8NTg8j;HdF%65I3d8W* zPkjmTC^|Ot%u;)*Y^AwqXT^mydgTUs0NSmn`!f4I&)BuiZL1ZclTfyRP^~ zXJXLiH>JNB0{HBwFH=qMfx$%HLf(V&z&kLzf_Tp)7ef!qhy=Z7uu^;<>43$+IIX(V2e|NYU0CkM%YfF90$sz zOupXljsp5*pjgJ>v$ddBwadO`?fu=dE?O?1wa70FBN?s))gaG6P&qxJ2C^aLuQg;8 zfK-q9?tdRdQv?eDjSp@~>YSt9wq4naQ3>>fN5aAD;9Y)(HL$zJB`Kj)t~yY||5=Sd zkYNE}VXirRYG&?UAEo-hpvx!-JEuuG-3jQI1Dfo*n??R_+xdKrAPr=o(KBJ=90)-c z5hO{bps7h`iGR-`&R@Q_=tmiO+CKYXFsAVrCW8OfaMH`)EsvzO^C-VzQqB0BiIdc} zG@iOjo!JH<8o1U^V-FgGt8f>5JWlkcs;bA6;Ki;Hs)jtoUi=fa7VrL{CiqC4946f} 
z8DLxf5WxVuHl0NN%{xFM1wU*-i+_<>|L^n49?SuVIUB$7B)X0tR!J2#DqdlH^m=?d zt6(%Vr(+KjwRnO{Vj_-_Jsrt^&Bz#K*4GI5COD=6MF323v^M8u(E?xof-tl637P_# z7;@DA0(JoWAYuT8177`m=KX(&IWMx4pH{7$Jn=jM-(krZV>Mv3Z}rK)xm)+JOyYt? zXO8UDplx$6lKQWY&~s&^5s%1z0IO_<`lU?;j6M_mVDlVY_=dE9Pkuqc6lxk?K;xcE zYwqsumD3QNITjWpd??KU5oexJRc_vmpPOky0Tt&^52!?H#sMnI%Jy$ZXB@CDPnWAo@&Vj4HNLa5{uCt7XqT&Opap{aSN@#@@NZK{ z^{R4KFPzrA!NU9zHy#6}RtfY{yJ$_B8Yg2R{Dwk8B_4 zzW1MY>d#h;453@lN<;U`pz1;&aGH0yj$`-{-+$J6ap*=GShi&M7w70V@g`ms*c%OI-Xc-A#gzW zK#jOR(lRLP1?rssu`dD~EE(^~p#Q?&ER5*CtqE82Eg-`zjXee94F14M}q!qtOS?9|H#>f^_y zSk_Ok;1%mTMEh2;L(}-*F^dt2F~PmY^?i-_?B~_^td;-gRdG1z!cr%b{@<59GnH`U zHMAxKS)1`e%abR^1&@NLLB;Xl=}{Ao@p|Nau1ez%>KEgu4OEeiT%jPq&)c* z4K+q7e!$D@D(0Xi3?TebaUYFl07XOUr+MQMjOnWqpx&M)3-{_eAuYC`|gM4i|y30zR^!^ zO9815jY)(c4r`Cp89Np~7N)r68rHmdbKHyG0Oz#SLWIhuYg)@_mZJN83Ais%tB&%Y zoWuWvZi(-5bmWRI$o3xVEIuGc4bFqiXCeb+QTp zRMyU(-bb{oPit5G?hZ37cqDaVpP4C*+|48)m_p6p82sO(=VIeQgI2s*hq&H0O%DM^ z^*ai2doD2akK{0lXrb{bn!&h7Tnxl!Pc_3ZYmD`Jta2j_++1YCsA+8e-8wz2hp{i3 z(}#sD8_&79rEZ+AYx}CGzZ1sPjESHET^dXqdxr*7jBt8SOS{22>TT7MXc$GefpGof z8;tV4CNq_jZ&%5=eJYQ2Zb$c2ed*s`8eQ$=v?D8t;=);|7Z;Okg;mJsNkbVsczwDE z2#Znr$v=&?C%S-sk6^D_>;XT|m_9e4tdZ3GGvy%Tvws}q>0E@NMA;*Wicv537rQH# z>R7`KA_DIMp@sK|L3IP?-!QK+t%66;$7)qOdg(YNkPy(f-c*N^ zw^f@=enToGW80nggZ%(BW^kfgZ@s`HIf8Efh#fXaZG4(*D#c-?4#L2t>AZu|{Fp#< zn{*MSeCgWcGUTXm|ND$DgV@GR5jyf?d__T%*03tj!5esmA?r`L-y}HA*7j90@V{kE z4!eH_WlHu)&0;jvb(AfL^KO*wnTXNcmp)IAR&kHSRM8atQM!`*GKi z8;RO7pfA)s@-RpuIV@A!8rC(;zei~77650QJ04Bs{Fg4y-Y@sxTQYis@6Cs%M!r3J z`uAYu#ISzYYwP!N_pu}dz{=))Ho1%_w7DAwP=fP;_4$7Li=1j6{ql37G(J-0y%Oe} z0dihQRNJM(tbySoG5-*^Z29@K1>BY!LX3$6ZQd#%!}8Toii)R0NNTl534l3HoEiq<+4Nkdu<#5DV4HLiN3#;ET5L7217f#8eNK~_Z4Jmc3stQ3(VL{_DAj&%6kP$*i z{7LiWp!fW#Au;snOKSFi9Y?7E*lL3o7iw6aw)M)4N73_SjMrL3thP0|@AZ@;?w?1G zc;Me2VSO{8Wpa2ty{0Qv@cdifHHeq72FSXe=9FtAJ(P(Aeiyk7Qd-)h+IAtJ^4>;0 ze(r-mQ1~-&3d=2raCR^rz7^&PT{fh@s%Qh@ZA~UFxBHjCKnBi{qlxjRE)9*6NoZJz zj-zT!{ibTX@=Uw<`go~@7##vZ10YIABuq5g{{0obwFhzd7G_0Ek7{i9o^0p6-uTR~ 
z7bsk(7e*gpZ@-ssYSraMw>8czOGwa>kAF(jNxCVY#+uk0~l$xD9Uu-329u9#Ok zo1p<`LtN6GMF7--XTaZI7ljMS!{0j#po;Bg3Hq3BUF}Y!owQ=JaQRjgT7I!#=`ZJ= z#b7~0@D}r+E7}e)DqiUi=OnSoUz7oil zi6FU0LL3cwT)6`{m_X*iF3wrMkHhn;!zD@H>F0~~pLisa2A@i3M3>U**18T@;!DpG zX78NtGfqudvIKZ@mftUTE3YLvBQBLYoyUL!8r_<7THB7FU_kx(>A<>ch|O!5;eigz zXjBNj`rd_!VbU1ujgMkn!)z)*dv-c$0nS9jq-(qH2nRnM`g8ooWR>M!mC5BX9{Jip zoYroiQvTuY@p~ul9sq=hGdKNp=r;13yu?gyN}}4HLkLKPO7KhT5X)O|GB3-oP?vXOti+_AF zEMYOoL!C6FU>OV1Q#^AkliFXsGk+yA!x^d)@Jhm!hYm->GHszc_C z#VhtzI?gw_M#2$S_{~JG9~NPl?(J(5g$u$nU1gcv(@#KVM!}-l@|2(9RQuEAk3Vn` z!_5?9lS$$Ec&te}Cc~dc@);`?w{gwAC~&a+^n)o~ge>wfhPQ!EUj9M)&7(Ah{6=h* z>gF%Q9Von9$sl5QD@*p*8+Z3`2+--dx%ha0u5m{J*OZyRc4osB>a)T92@VqgmH=rK z$c{Cjf(sA&#ghH@botu;rmws-n&RZKMEuZg@{8AyjD+g^r$KFx&~8m*rz@?0K$OMV z`DkZbOd6su4nI6_-!NyhkJT&W9k5p9MSB-+xQdy$@_!y|DW*ToabOL;&+YVm#yI|~ zf9}fzpXD;LP}jRo^ryOcr>W;#Cq+`H`oA)>KZ$63GByDcHO?X~!RTNO?e)DmW^54J zC%kB>zcUt^B*GH=SUF<$nx^m*oe>PFQ6viE&(z}fuTNji^^{D17D+zdQX3!xE-$Lt zgh)W@7$_7GiZfRqB$`}BMj9JXl(B1jfTzlI>V7+jY*L@ll_va+7l(I}T#MC76&7Qji%8T%?zlPnV(5wliNYhy|hbYW`R<eX|_1PS=d@-uq>$TONI+d@HohEkB+E#?VL4Zy@3K za?VAsipXCma?Q0r98MUbL3;i6aY;7wK>xDB$r?-wN_Iyz5Bsy~u_D75=1Q&*jZr=| zppmGrbtM-#88^;XP7UMJ0!VNkcvmVMx@{HC={Q%}0Hs#vNrq+|0zbrT+ES2YuO(oZ zcnsNmlPGH86_p6of#&@SizJU8+EA#ES8K~*lWbn<%VwBnJXZc;+7 z{6?C+y$|VJ-|$n_8T>BQ1hVO}oPMGmlq3UB-Nt8HcY>Ecv)iXlRt2U&{7r3Uk_?3c zW%8blWO!lk-89JFPome{1^>UFgv$fI4S((9dy@P-nB%9O|HB=bMgo?=EHk- z45)!XC-qT;G1z52wl}>UZA!}%C3Mb?;6kjgQ0lRb&lsytgO-?;ARc`>W>YGet{Kd? 
z`$=(#kYeK)g*qn%F>Ou!QvwaIy9+1mDI;S%N&D@!(D*FF)q$FHMN%RY5_H40Kz{Vk zhA%C2`Jwr$eQR^Xtbsj{#V;1l*-#8J<0&Dsm3vrmK4d^j!(tplh42ah{ZCW$YnSFX z1>vouMv59N@h##e&2-+!PAJ)@kgrr)b5@V5g3tl}QO|1CAn#=f6O_&@4ZO)Ni3Aa$fj3&2x{3#7SPdXqa;f{*m&sgOy~K zP3d-}yY_L(v$^CJ%r*Y;BZ{;s8tKvoAvkkK#AN@9mi%Y+DHG?LH%TNoJ*)4^L&TPY z-Qh_wmM6XOo4j!MPRe0}(tQ%!_Bqb+q|4BBb&6Ia7LaOpfbV2quHlr!;^#*Q2P$7t z7rPAkVndM#dY4!+Aa*7U=IA?(VI6EhID`PNS1yQ_2?RkKEE zFZUYl(9e-jG>63xfVkS6%e)gbw06)c{eW?>FctKQXg3^7%5(elLTdYTDRk=~dAaFf z^CV0v9NZ*W%{f{6=dp5BzJ9 zS5VIRt>Y&B6&;9mL|o!hNCCrYzP`ujPj!j6>su3t9gj}$h|%C60z62Q_p&{cThc8) z`C^9IvTm1*uqu|N92CJodtUz4u-sjJu^S0Umb4{%_*e@3Z1(x_3tT4bNn!lP*--os zUnAJJkj%g7Cb}OYa;Q(l5Ji>eQjdh7KOLu(4Smx78sjnmVB$UwQ_MG^e z76^w&29PZ{Kp!E@2EbVTbGb*O7DC8@f1M|Wf`~p6A7}eC#f59BiEW}#Ow@277cL7e zfr@i=k3r*^Nt`3!p2(zGDPvkYZ!fqkNw z#U(xIKG{Frfrt>TWSt_`brCa|W2a9zD0+FiB#~rnV!;(2j$MDkP9&%D_gP;MBO zPV}D$uSt;qhy%WLXs*qyL|0(n{b+*vDJ!`1x~QspXp<{4wObdzJ2}dakUi3XEwC-IPyNv;rzODyVzXGT| zjZl7+HWb~4>V&sG9;RohSFS`&)LqL9NE!B?Y?ifs%9f}~5XtR;7|^LHS`AAQlSD4y zRva}h^`l~zK(TeXq>~KMc2wnjjZ!gU?PWE_f`sC|rT7*LWT1MM78(~*zNObk3Eeo%A_M56s$04ehQ zzNvWyl3ycqb~b&=c}ghpeX}`|IAw2@0ik4-{;s%BPyxwU9L4kg#(}zn=KTPo)#j9_BvjF(@;g-onT zzS@J8P>LmKV*4>=yPy*9Ih%s*klJX*G2`cUOoP7ZJ;!Hal#S~}V;(x&o%?w11Mp>N zi!E{?UNsZ_`7liZBccoTI|{J`?&kn<;vyzu0s`976-L}JW$|!|8~#+Nz%_$>L@pfU z^}r9CpelPfnH^a9e-wK7%dPs8*UL;zjxy(Z&PfrHN4lAOIes#O%fQ_W!;~}um}qhG zC+^DEs-CRU0yG;8;&$JD9N%8BSst+relA>yBDD5fVhCu@-hrw1*O<2Ovg)4eQ&bP ztq7;%6G2RCrPrVsz~fipPGlev1T8@c`A3BY&cARgRAKoaNPlm-8VEgkqN5VjTn=@1$s-hOi&H!ry~Mi6 z#kz)w3&LyO+eBk3sq`@wmG-2maDv#fa${abrvW#lH7;zbR|5Zf$B0o4pwq)9AR~)#Gbh1KMt%QpNLE1SEq7ArDg; zGWc2aOwyOD*)Owe(pAaa-6??`xY&s0_@@@u*{b088P##jV6LqevXWNJSaP}4@^!@q z4jOpdpE&PIV%K2uow4-DgKO1On%tYFHCK?ZWvodH)|lXL+T6wZg8n+U9T`)&NZ!d* zfBw?VH=4JD3}k;@5G-ESrM0z1Mr%a0-B}6?(NP8mi4#7R;zl4j^v1YXOMg6XM$$MU zOwsCA=3691axeduWKFJWziCUyn~2h5^VYixqc`s#i^WNCGXNuwu`q!jkzA4`n? 
z7Xy*sw?~7hjo;*S#&~5_=kV)7)0t6f-F#XvLD>Yvo_&)`k?GKI0ZQ53>8sQ={p)c_=j zCU!eoJ}cm|ef=AYZhbve!Z3XA&mv}g+2gwL8XYi9__d*9wo>c9(u9_J!2h)VvEUuK84&DUL?h(!Ha^pxfOTw>{SUnwnHVQ z;pI0>3bt=xVoBRl)p6$jLn$o*4~Mnztv3BN9GP4BmWtyaTFqsm{` z#~mc0_hW>p@e#&v7<{hd=XWdB0AJHFFw(*qr?G?`xAW>6dxW^E>haw@|HOKV)YXRX%;U-xm-;rY9VY-vP7p z4yJm$-0eK~>iVX7JE8H@nCe%*gTzSgiC2A$lQ8bGn83Yge<&iwZr&!@Mt|fgD5a8f zY2QY$Qq2-#nYwa$RYs+q*bYmLNZ==dt(_yoeZ?wx$Tg~0EUfFeAG)QaMI9p|;W`cs{O5)->*=ktXccW`=q{}rZn9|Ed%$QPDE zt3CmzG(evj8F;WbYF=CaJb3rOXE9rAJ8I5@x`T?1qrJ`bRZ;CYnu_Wbe)$u)3Ew2&uOu}Ap42rxYo)uqE26&-8vN9K@$(XOqe%{V z9?*_jrg|!^8Xr9%YIJGllvf&|lF;(pB7dEft4gHpkS&jRY1CC^r=ZkTRbaBbhMiNK zpdDkG-YUBDpvS!_6FoN77Sa#s_B&&zeLBltE8`Q~1N=ujDjHlE3z(ustT6)REN!NhG6^>2< zKcq^)4gL2-nlQy$ z{`#&A2&8IWhjLoC4kA@SD<``C(PsuTE?Io68>a z!}Wm(zg|)4v>e#!#We=Z2aX^XHl-Zj&gJDmqLpohE;J>cncoW| zE5STs9SJMWb@RwukDwEek-|YqF~djEy+L_Xr$fxI?IG~GyoDYFh3mM0HJsfRtR$k9 z>Ph^{8iC&WV{cRmP(SNEB_YwjQ$yRFM1{OK(ljZ|wK(L#m^i`-GjE~) z`Srf!kmuVB$?a!F^j=Z8E>zR}I%EXQVU#{ykNw*hzwJC=UdD^H<8VE9qWRh9&ER?U zB|i5?yi(rMRwzEW*K^;Q+FH}6QP_uYF}*wzmwV_tI|559MK5pz(V332eX<&{vgnTF zrl%S|bT?*&#-DjEqb9buiD2i=KVIZd{xT`M$>-TxKco47eLf$yc!um2h?AvwFabE8s-djjV#eobf z49+Vt!=c66M=G=rMFQL3=4#2@2FoMNdmqld`$jB|yL;)$w{%)5+&oz1ZdIpXXzmeo zwlqLfkVS#c>IO_MtmeqtACIc{j{KT6&Zmb@Gn&23}-^eG&aD zbWfby$E_MRRz2a!Zokl=y@&B>Qe@Kc^irC5`!gvoJeE=tlUk%{*oWKla+`oKFZc1% zG8b3=`o4%L6C_||{#cEo49;!V9tLB=5`E0bKU&y@Mps*Ae-wY#-Ag7uPfDNF1(y^h zc5?2ggHO(dm8Ad`XLzMi-pQ40E^^a5sYTz*v&ieuw*9Z~UhFvppe_CspMnc3@&p(a%HA6NTHD9)wRdM zQFNSmwlhiMdBq9rjLfb?u+b!sn25UMQelwW81h%eKot!Kfl@|fP$^N}zI>HNntcJD>|NztVpY^A>OwMD3t;Dzx_xseQfonJzgR}xo!M#A)lO3Ohf zSZe0;y$3u3NxlghX;!&gvCg^6wo!oY2RqQRN;IGL)h}X*=+%-)(BeS^P_7=ep2FJX znFujtyruqGg}Sai5qV%Q0jsxQgDZ{mUBnx`SKjA~8<~7yUU(OJ|FJZ!s8Yx1-ds|38E@#7j5z|KTO`Ah`Iw!|Sg{|R$DyTV< zWL*x98(488cvt&WLO49GU%{#A)1rYP2BbMIgU#564qJ#EJm28JqOwQ+HPSO?_7ZP= zU7JW3Y_0=trfa&kj>&%2r4+MGzE|1l7Em$>{dR4%lVbYyL!Ok>v?VA6+2zx;%E809 zbP-GJHrkBR=)My`U0T zZ+p2!xUlJbIb1fzN%)3?-~95&Zxh9QmVU2jJT7MdrcO&3dOFx3DPDZRTAZvh;GXeT 
z#C}idX`Ba0$NXdNhS<5+RB!xjmAVT#8|)Gu=M*cMBC_PUf)OfJ#XMJwCXrM>e)2jw zWa$Z`dqTm$BJ-DE0badmk}s=D6VtsFB~64uhD4ZWKV_oaFRmX)NhqptQ6HW{%1%6L z1_xg5`C@C{eT74m^L6aYeDhesXP6=$L2G4B(Qu_bi&NBNuS$9Kmp z&f9c7-nEL5oZX!!JfgQ=2YXW$)ZV|C_1hFx_ulTv{S{4?WmMEA3*z_6Wo4XxaFZ$lA91PghU!TzY^mT(NWt z!g5qxz0Ix2l$R6R0%8%!lS@{w0=5}fz(XSzU(1L~1laME48y$h5IW_+5h4oA>es+f zt2=#91?}gp=~Xep->HFFeTnAS8p%U3l>7~gFhuaig*=4h#Rc7#OO|RJ_wq82wBgRA zJDA>_Y&(qdQ&A-vNyYr@Ia2NG`bOH1)Cgnnt<9elx1FK*&-_0n{YX1=Cc`Php~>xlTB5L1q4tco75x@NJdBLZO` z45>^0dR`zI%xV1{v)`Nyund{Mf6NJ3BUN_RQ^LCJoQeJ zjN^g9E`^WQpCF2IxGN+eyMhclF3D*aUWA=Gu6jK{Xb6wOT;%z`bee#NPJ%fn!d%-+R zW0@<$flXk-+zah}o5O}J+PdhLG8I}~NsQd475IwqHIAVJlNJHe`%oBgXmk`Oh&qm{ zEcCn7rWT{7?tO8sOXv(MOh)>CCTBxvk&lRVu1>ZLxsEFloH8 zeBV_^%e|M(xyRTtJ)7D;^dYdG0o61L#z#PwYUkM<M^;_7E!G*W#0I7l)jN0wIRX zE0-dZHleknDcx3YA^E$JpS*Y7G$#uj3V~K>nvVUcm&2pXy&X6EIH%Rl!&1IiIvUGY z5#`c9KJrWCnQ#8Kl7Z8rT6VYE?|Sa|5&aA$c4kf}@P>tCESbn-eg3{;aQ1cknXz-v z_vyLNs%mdOj=WW>_KLYL{B39DQ36NR_AXY-p zVD+YQYyyo^ST7IQnhM%cj`wOg&}PXOVguagaecN{+Bs(GnB3Ej%`Be2-J&l$dZ%}* z#XdUGpeGE@D-eG8n1%$N-68&&vD1UesL4X_hmWoVFES#!u(UZZXTpbdPKMC~f?M1d zV5e@6$?9E&&Qsl#*U~66LTDb%(wuK4&~aVn9S^0)f0&_e-z)mkm7qUC!>aEv_53u? zRqwXKf|LR`J^tuH?zKlz+;wuxnE1&OeG0ST-YQjRO}MuVM_w=;_&ZbDT6xRc6NRWg zcJnvLt!~;+R~Q*z5Q*K{DRfkS!oSzs%DLN%F@qiRR)uT!kA}X@o_9&rjB^&;CThPs zfEGyG&%(Bda-$UC7txD~?Qb-;_U|>|_79p@H_hR|pS*?{y_Qt}!5^o%J{rwM>bTi6 zwK{g^(G|}uOkQr}L8Pf6M_%K}k}>RIjpvnIoPO^aV&Fz&VfvJnJLtrFuIVSv3*Yw2 zhnSip+~9zw+@)m-23y53!>JiLU>Qo`E^Zeg?@Ms}JcUSF-J}py=&<}=5;0q1XkL1x z$DHG0eL#G<_YqveN;cr&Nn&oPP2zBC4|_0J#;O3$Yv$BkEH4)r?F6sWdl0W%_ggFe z&g1!$yME{3CbS4eV%=Ta2fgB3)#sf*zFU7G!l{{_|0qR1Nts08?0!Ol^ zgD#-ZUYMebhdJD<CHSQpIq1Z;{PQ!2;hR#JvVp#JHZ#J|auk ziY~)98L`{vO`R##*yUW=PxxK>PHSxI=)(Rh_(P*i^WdJ~>0=c!GZvjY|(CsJ`J zsm+-8h!bePwZ!c-Z;!{dD$A`rMs(>~UwPt>!rR6bJywf#LHW#fe!*sSv;22!t%94? 
zr3@tQD(h17t0K{vhal$Y^@q?_w2}1a$sfru*MS+U^^=2M?e0+6=R>E@(pmndrF=2c zwfE!O`x>^O5G>S>;#8EHEh{8{>jLD3GkuSq-OTPHpT$NW4n-#EGzo7oYXY_DXfnl| z?ciV^`On~my$)>AD}Mi?xfyCx{^B5%!y5>y6t@G_iC)vQpI6F#bhmeQE~=MWpT+S% zIpOg(YZ~L%Ku|rf8|_t}csO^mob0)$$#PVAQLBH9pJK03UO86L1uj7(%kr0;sF+zf ziQ(L2UmYfEx8-whLjL>{uy1Gg!pq5A%(snm?JX@QwV%Ge%6#-Zb3MAH-tRK;%`}R= z2!7ep#~d~ajV1}qDG`J4qY;~4be8jmonMO6ucGh>d;Ijq0(;YxZLLo&2xEyQC8t!z z^8JnxL_&Bf1z$W2&)uPgr%#m@AVOl+P#lj3!|7Fl0TnB=UD*IZnz{-Ik^J$vLT@dZ z>2h|LHL|pJDZa`Xh`KZwa=szwoSj9RKNBGI_Fj`#zd1Bn-lpBTQ+L=+a5UpaEuNqI z8Q_a=cH}E;OcFhf9tMAU#Xf3#{xbIpM75gZW-s9hYfKcUD-j}e_H!tMe^;%G~Fw3@u3}3pFG49i9(EWexy>(ob-L^I?NJ>jDQjk_U1O%j|L6AmD zy1Tnul#=dl>28n`kZwdm8tH!L((O6lyWhQ^XP>{n^XFPWxYwLxj=07c*O)iQ7UwO} z;&Nd)E5?LHZR}?iN&A-Kre2MPvt`OKT^-SKj@nq&%Swye zpjoQZHLs^lpXJ&m0>y5gTGsfp{OG-d!N>U2k+w-O4tn|>VRu4P??szz>AOz{YV)Iq zkt3q%R8{2R8Jiz<57wDKhLIY{Z!|O6>C)S%xXN_8OL+i!m75tjb@xb!>ImlPA&QL2 z{*prTT5fnIb~UwZG1Hvf5z zhjfSJYWsSRbh*a$4R*ZkYHm66erAu+o$vmX031a$P9~AM-rzizPnZ{sv%ATRjg5eh zr{F4~@o0vZeoF6-;s^~VD;9^OYg|U|+7wdUZfH%Q)I(8yLDp~jOF{^q>AbvSz! 
z5)9+(%UDwGQdu!jC^!|(05Bfk)BuHv2_(LWuRy2*6}10$+|;h+rOEX5D+1{!$0!iL zs6E*5+ea!%MJ%$@&&+?eT92q5W#pV^s!VOkrF$LYWwJT?SLl4v%oa@3$F!ETa+~6$ zWVd)|CIL%_MIbX?4hMQ%;yQZ9FSWxvdWPDl%alq-IYCACF9ZOAw{~0cVX|b`)c!bMOSXG`JAmT1^YY1?0e8F z6jN~6S+~DIqd=Yz9_JV?2LfCK3}<`g!pNkxO>+6l11rJKOQ|t>k$n3{>O@R(1A$=@ zI!~ZwG=YCpMoSz(er5ptJNu&F*%?>cO|4On124Jx&P*+ilA}6#@lKWWwmL1Jv={c+qNiK3zky&D7KFe(NE1xl~QGUAHE-{HOMENEc zB_|KARDWt!=VeshV0d%?3{@iGdo7-sxBfUGQA0~Ue9#6|yBst>6Ih1=oI~gPjdc!+ zb7BF!{au0tvcuV4p0c|uTXxy~9COd{heJNQx}rMKf(@2Xo|f+zgPe>i8P5j22p&oD zdosrN*tC<2aUZe}W06%UPx`m+-Bz3k!j)6unO3*lr!d5D9XzqgFaxi#zEn{t7*gh@ zGSL5Y9L|A-{kj2Rdu`R-R;rB=h@k%=r*s*{bSJeQv>j>(VQpin33}=qV<75&Keaao!Ltw$+?jplMiigw=dj zq4-X8crwqPPUumwoxR0%!!(~!jqH}-N~fOW>0j>D_(bU)<4c`jZ$*Sw&}TTfz8IdK za^1!s5~4{)wEJlv_a07+YB^@n5E)t&C`JU4^;Zp(0msg&m)^h7Xy&~v3x?-cEY2~y zNEQpyWTuf_Qt1D%5Z>zN))zfzF4_L{3qAz@N14Py}lZq|@EjBFcqvZt7O>er`y}TVd1tVdN$w zn})sVHcEc>3PE#}l)x2PM+}R04%a92{!apf(;CT*>_Edn`NoSpn~uN&m(j@>kr4tc zJ(Dln`IhxmkF3KquDXss#bfeCw{gNo5?o)pDO`41y+A?7#$_lRIr|`hn%eY9cHS%+ zGd$#T>(b;@nXkeUQcO(6si;sJR~w(FG5-YJ2klMf3)}ggcTSrh9pD-q zpVRHral0Ha1mYfNfvZJGCn9FpNhTY)Do^~ud%{E<1!y()HjRj3Wp{Vq%S~pUwivBe zn(=x_w7(f>7n4o#rcy0tB@OiX&csP;QwbTvLzzw-t8uExI z(?p{wX8{`l;u&0JVIG!~9FNWyigel0wE~QgAuV@T80n>0w_#;j7a1ly!q3f*^>A88M{Z~JiioW<`OVvY1_w9mt?wHuWi>>crqNQ3 z$ohMfgLkU%g_dM)ggL5aj6W=obCk49ny>~Xtz^z zkh>Yw8Kf>fVw4|$kCMWN1;BXPz0-t|Tg>?tLxMyM1HLk<0NpMAa|XPu>y_i38%#$RtH zd|j^b+%}r%{Z>MONbEOKfFE`OU;^I4~oiy&8^ID|@wMl_muaj5DJ=+F@Rjr>eO&HoGHT2~-M&m2v;* zeX?n3&=vEK3#oL%5S3tp)K}&UcakFgL`Uyap~vgH+IT8dH|d_ZJnZU{T*JbvA_vcS zLlw@AQi*FTcuA+R>f0z4^aIv)Lqz9X4E2PqZ?>RW>*!#B_GT)yF8sW!A6 z){;V{-hJ4kT=Fq_>`#a0#)6;owlNFJe7(S{xl3pKN5XSak;X(gU<-mFISKyUdsZ(H zm+q&l-w=G625ByiQP4{`w6KI$TNa+JVFgGkz@%d@2sGO<%k2(|F5FfcS6*#ys~D!_ z-OUCq-+bM*zPY)`ZV@)p4D92Vs%&8*DHLuSUQ`Z(rL=)IBP!`Zv+DODDjg*{S&9DW zseK>V>d?x7St#LdQb&521uo6qOufn}&fNv)I!P|laj@vdH*eLf^K`O-xsXz9uQA8Klp?M?z{n+bGnuwKX z-wrVcn$$?bEo^8i zfbb%=E{a?EU0sO3l3ge+4VtXr$T?YIN`X;yYCjrpuGr5C5}5qqwvEG|)x#Dv 
z=Sk07&lD5voFqQCjNrY#`x-XJ!C19RCNw1Qd_;I|JWMwPfe5vVl-S?!o=j96HTh;L znc$hMaOZ_I0e5q4wYAr+dRLeK#!{fu2{plMg9DEmeh*^Eh{O1sP44n@xSC_#0<((G z&bS)k1`_w5Oi=4ahS9K(myl8tMSXLtZ3-+uJD@|w$xA>aTXEe5wYLh! zRV^}P8{z9vZu!b_yA`8b+tn4(^6ea(pN7cUb5uhazVJ}Vx}|f5;~_Jwltz>eZC;GU zAgjO@+szxI+_{LhGf!pvjzB8om=aPR2Nq?I8e&}U6?h0)6#n6pq*M}$O?B1z%$|FV zAzXKasb~c^hBMB0q!b?tM_mT_k%m)v7GHUmlKYU)9-4eV_3xvv=1$n-iByrLQ)LT` z;Q=OA@QFSXrxL2?tg@$u^G z176#4I;5u;Jo4(KpBQe^KBaSD+_>{C58At=i(-2ru&nQew^2>yRny?}#(Qfu?`rCW z&jSa|@%H0V{Mq5sJF6iNo+XghmH?AMW&ZSHb7^zB;;zK5Im>q>YvKWT2931let8UI|rtOb!?^( z3a)BPrU$Lc+1c+93N3|acoD5$NS?Yp0+q@V6tpS6WQj}t*(&!91|+$U2@b9H2^z4u zCf^i08+;2HEVI-~%k+JFw!3Ym(iZJN4OXlaxoBH}+T6Sepe<3B&#Ils z>p*(_e}_gTPA2Ue;`!zz-3G~(v`Vgy*tzZijl11C|d!T!eWb;#v-QKGlwMDl>j0G8TpMb{wdyjtxeAcv}1TyIvhV$T^WEVaYoE zeafxSeRGB%+Pb{TB3F-*c>G9bYI(}-3)AwOQ*%v`_We89$!evB6VFc6v;(>&@$)u) z{r&lWgu8@gx`y6INU0v8fxIJwbZ|Rc7I_D{$y-otSOotE8x|%`HQ5nKa^2|DK9}i@ zyMJdiSN-b7OY3Nlgmxj}I-rd5Xe-5rR+8&USTnQxckBY=j!BPON`2ida)j1U_73lI ztQiwIxUld3J=Vv#X^jdOZ?-Z6y&7p}&yd%OT?5AE_Ttc$1WtLH%$`ImiC@tLWqe<) zrggjeM8JDD{AtAI+qY?>NyUrfC56LFM19)@*S!j)A@hDhH=bb<9~cFFMFy``E~l6u zGw49ScG63SQ-24-D}Qh4W}K1;74n^=0@-pCF(Cy?868(4x$iq!fVIstmF7Q z7>lFXI$0KK*+44uxxNm zW&HdgF4i0>NnwuDzsyDT&P5d(AbWNKL~Yi_UnaZ05MIR&VY-nGgjGY z`Ss!SgyQ09>}oD1ompF4_g&a*tykFl^-=BQgo1K~mwrn&y0(JD$T8;j8S64k?3A7i z+mZ!3`Z%|A3jW)uBJ{R2hNwH5w9R~)BLsaG&vmnQ($X@v3R;Cx0(Ba+m=}01PiK&| zK+$jG4#Bn=Ml?bbJJ+ibq#)&CP3l+ZNJP2Hz9lj$6yKJyA@|?XndVRE&xH4Y7Q&k| zSZbXuZw1p#Cr3EGIq}N+h{~27$h9lkCC5C|9aW^%cW-??CKxj3%YX=}j0-m`{?jB- zlp&~UNmWpTC>oMmPwXP6H7>c}&d#oR?+Mv2QG#AUra2#h>M7Fn#N5n)cWP(=H1Ejy z@3 zu$Xh=Bf6!#%Kh%9qmY5>`BgG8%ucq=RUmMs!n5N(`?i3ZtDG2*555q{^DTrHwB&ka zjl8Lg?Joe|D+nY0wynI7b{IvJ| zxY}>}S6S34;Ui%D1E*S;5|ojUz$`5rAfn= zX@&t}0M9eB#|_fdLRIMePuVc9us+XBc*m5^=}V7l@Pb{W=-DYv+d?fo8dJ`#Ud6hw zXP~j#jZUI^SJY3YH-WYK9Fm-^*zul#RhMbic=J{46_Vdl)rVX`1x#B1#Tr@l@uge# z=R~zVQ_@o-u9pf(!>d+dxT0(rQtvaxDqc4^|IEo(cwWs|BMnGBZ@QZ`9L8IB!ft*J zJIie_t|XLa5JHa0nGXXV8h6?Uoi+;}$c0627!H2nO98})(~irW%+i3BCS)Q<3{O6c 
zd8CD4if23iJ*w%T+X?be7CUjcRy&ULlh&w64CL^~I!;!fC?3C~hvKXEk}Z*cN?j8} z9N^_YplMCKeQ00W-ZfPwOScoLH7v3;J}Meoc(yNk(V(K=!ivlOWKM|SIu|Y`EHv^( z#<3&dZ>*X)bDCpw(i-DvF=5ChWQr-ox2Au{%U2j#d%E=<Em)^#gRAQr(e>WQniU^SJc(yVBDY z67HBk#l&I*(n7)4h)!sS4mbSI+Iv4HAwbev30(8U)j$E9&4uP&MpJ)*7_c zFVf!hS(l~MX(n#EY;IiHTR>Gj%d~5!Lt@`U?^HCDq7Ps{uZAb*dL8#778IPvQgSaaZG^zYIAz+{tO3^hhUWcO zNqX@xI@9k+#T}MraV`oCjE7X;CLLZ`T%Ck@s}mSs+L`pLg;SZT%098_nk(G6?k`w@ z0j+e~i@FhAP?APxP{j&@5((V@#C_1(Yiln{e#DnA;oV$fs6`MP4#fom>P1lUu%lVl zN36|JS?8O5VJ_bTxn~4{qyjHWW_~in9Jd7I7a>@DC==;Q0bE!yg{f;4=NmF=xW&-} zlJkMHAxk0jQy{kX?g3_d0L3rkJ!~emP&zheY&IonpFUu}p>t5^6yOLw^$3y`H6Tz| z9(r~@L@&#u7<|u7BSVilh6P$X=Ro=c=s;h2d0A!n9qIVvwngPnjNy`Ezv}8G6VOxk zn@vEVpCc18Ig?^Od0;=`4lUivPu{>5)+~~2jJWzZU1bkoK1m_t6=VUO^e3UnL^3M5 zPI{Cf<${8g6_Gd`ted;A%!l@yv|9VHs9lS-SP8^3eTBF1WK9-_Q*!V`SrzzRZnM~B zIX|uNMX&g`kca@s@I@ADKsFGpIcnZ7_=)qMt@dVCsd_%JGF4?2)vYK}$Yo=UB_*Jl zZ5g>lqEbw}x9e!R>>2rqP9QSTF4~fyHx2v4X@&8Z%_*T4IgJ{^3ugRvu2*HIVR1>* zuGY+_+S~L6vs2ABO!sOh^BGvw)A>S`96F6KswoQ^T-?I0 z@*jYdhZ>ma31N6eB3{Cs>y-rdOX}z~b0@seCM!SY#^=9O6;{j&=|pE?w(A+$sFXI* zG6&|Gt$mm~k(Xl?<11?R)$5T`rN;w?+NUg82Nwb&<>v9@7NN#ezlbc<1&P7P1d|NL zLJmFYK@BP%2OKnBA$2{?j?KUeb2VF(GZ(-g81Kwq% zRyuph87tvW%+xo0y_d|z3OMQTOWA%(C{TKsgE5K*vzE}*t+HG)%(J(WAOCOxp1ud{ zzxJL!fA15D!%G!+hQ@eTzds=Z zbat!{7>r(Gm_mJ=8on46A{)1tvx(MZQ`1fA5G1C&tXU)sCNJ67_GNrv*3chp*94v zxNLL)H>GcLG-L?@!4~jFeZdEpuIQ844dXD3K1ZMKGmA{txTPgpRhHP6w6%?Fzjj7oE~?- zH`vuWB~<~)6*`;P%V?gVvk?M*fk+KoUta9PJs_fkA*%?^6r&zDW_zKt+$FX&n9oYX zj-a=?9Pgq2uLo=Xg(~~F2Sd1SCL`kT)RO(~V`z283Ltgnz_gRgzj~m0pPzq*N`CAo zjOkJXR?1PU&mv(^+SC+FbRe_ET)IhgbU{V_*jk{)%QR7=?(EIiE~MR8w-p8x zKbGQ`l0~$6r^9@@6OrDKjVrfN9b03dILpG{{@}^k>%rR3GQ>FoHY8j|2qQgzEMOh* ze#$00ex0zk6ZabSB&k$(TBpj%NL594!t&Y#puE1kqCM0%qOs})2r9tiU=zgi27-tg zNr!u126zl!868baA`mKBkN?7>_<+f)K;-Kc82q@hzFma}W%za91Sl)wBaWi62W-ZX9N7lyuypg~zc$ugPdTCXHgN zusgeYNmg}M%?t06`f=*cq5out;}J*sDd9zxOamNfX!p*=rD0Hlp@UudzZg3BmtF-7 zV~85wRJJ2FxXo!?uj*6-;cZB+N>`#XG?py_Kz7ml} 
z1*YJO9iH-rh_iY+tKsD2xck%<6Wx=WLKwff!5)P6dq`$bt=V^Qy1MLpNmgR~zCNL1Pfw zrY|%~vs+odkM{gL7^P?`8}H0fQ6?r9X5uyV>3nCGzb!V`!Z)!m=vpH|A6Z${(2;ie*&P!PTsl`9DaA3uIFKZ60l@gAwn5lA`%QMvvi+>>i(_8idO zhe2~%$kMkvI?v<4BQswf@mrNcIzELNxXqx1z?ho)@HvrP!TE&C+o(4-a;4wyPi1-3 z9xYpFL?Wl%QBh%PoHe6n%13R_Ct~&fA}oRa6qu&@I$FCRSpR_XX)D-fM|U@P>!U)a zV7Fer2^Lt#Q%%hsa-_fkPWZB+sce*LT1Yz16vgw0elqrD^b8@}po zMue72+)#qb(DmU4`&Es6m$OM0uFa%MG9E`h1}DMnm5-TXkn*~51vn)vjyQO<=etuQ z76}PI5OV7|h_yLyt9>JVhjf5e_$@){(k=j_4g>x)6&MLMfFgjG=1$vqmL=m7E#p0!QbbB-A@=4)F`TkQG71B$|9PdxmV>m>`P%5r=`WK_RRTb_Bi{W*DE6Dlba^jPPFl^ zeBbAFZD|iMjaU4^YRL;=kQt4~UWyc85^9d+?3)8v^(YG%(Xa8*f8~D5rN)7YdUM^C zt|nwTuf-IHUVLH%b?!obPg+Z1MY~7S<(SQ1@hGRu$-UacKVgfa8A!IjsFVg2o>P)GYmc?R(p;1mj{QV05fLp(R7R*Tt{? z;y7S;>DXIXn7d#ZoWoc;u>Tyeavm@hOQ z_6Lpa%aB0r1NFk=cHEEfh^@aTsG7Azx*eSh!EeE5=RaB%R}k)tTDJv<(@!|&mk4qO z)5!pZ&(OldVODhmG!X`wF*PMH(CC76mUp`j96WgO;H}%mmjM<-?N+)|wF#f}t?=zm zQevTNZ2rTU`RjyoYceqUGt%@UQ+Ei$uo=gQQKaYYxU*V{N(LzLxQCoOCx>3xp?ika zUk0S`x;2(>YrLERjp)}O{=$d+LgEeJk_13G5UtJaVrp11n~4K-rDy3Y$9 zF?jlD*d1sr!!v$6o}LrcbPvh_vxmyef8pi-JMRo^sMdYU3VVUa5qF8EIT3w8(_T#s3Ahrf*uj&oGYt|}`b$YW1gv5C5@{0W{Y)Y3q8u3q~#J9hO5v zcy7OQRZNWNkhGOA#vgt&#Af13Qst}HhWps6-uwP3kL=BRn+KLllfE8^Jh$bSn>6V6A~p}u*%1dZ|oR$qMn3YUz# zeo@FtbeAH8>fT;bzJ9LTX7^HtEE?H-VOAMQ-9?eE(y__2Z4>c_W1lto#cLx?u%Z^ zM}s5BE$T?9gmTV%Ug~BkeYKWK2~2qo*v~KCh>dsAI5m#eyVs>-br-K1?0@e=;JTDg z1Mp#MDj3TUWMV0VTM6}AOLv$A?-TuE zl_HV8op!_3>N~}dKt_#Kj^3O1L4?!4Tl~GC_}mC;AbZ2M*Y{! z^*@eA{x!ypZ;x;}9P0LrtN=Z=xqU*~a@Z!eNkH?5tEEwh2U)IVN0{ljDQ}K}Rj=M@ z4as=DQ7N<<_a>t$S@6&j+c<<_wf3OaXfvoM76SpP38LS`z99rP$)c ze?B2xK2_hI!cF1L89*-WdZs3TGt{#kOcA3z&Ew9DNnBMhx~`wNU03D)-HpYKcd4sh zn;*ESK172(UFeZJ%5S7zkT1h2P?8bXiLZdbTYF&IAASik^ciH?M^GXU-9Mj1D5Gm4zRlb@vTnYqIZ92i( zB0#<`F*P8VdI_x!@Cp7?7*QJaBhx6S`1xY~N2qx(_ZJTYNfx>}%TMc3%_$r)_Su+V z$q#V-GTwrLp@`yd*=zw|GxGTas8mHz@bvf9CU9UdRXQ%;F__5Jk6t{Ihps*L|gM2%n##`gZb2bPa zxslFCoB6}q`r|cTpN^Qegq@6Td=na9>4rTEP{q2re>pfr7@%FYA4zw)LO8QN5Zs)? 
zG&WC@2A=-?1Ls*k4cOvE_E7Urz@!x8H?b>oe+bY}T2;LYKRA>li6l1(Xp8}O^8bTl z`6pf!x~XE{VDk*&Rxmp_GYbolK6OS}=z%C~9%$7Kjh{7?2l5W4D~sMALBD~lJP<91~0+G0+)?*pMx#- z#|Oh0{`VK?p0P|=_XppBlvLQ7eSjDI1?%^J)^!@!O}3<798Sn_^Jq31vD8pQrm4Yk zd%Kez7^V$^ShV~!`BeMsInY)TYWipw7TDOcOlefs`(Vqg-eTC|n9IvkSO$9U2*KdE z(Y*-j8fclXQ&vHrs5SMC01uCwydodWngJEEQlr~Vun0wdL1=5Cfxg)~(^hP3BL}NM!Tcd40ag+71jVl(N}2mTa0~LX&y~#iPoM_`L!tE@ z+LwEVc)a+kY~Zi&Ph7g08nZOzSpXlr|-#9x0IIzPy1 z%rJq-fw{e?Ni{GZVueRO7VsWGG|9LCL1f^PFk!Rn!HYXgX*D1S)X;4ry<6`GcBV2j z#?1T(EE1D^=m=VlMg`Rii|a`%)Amlerf~c>Zbaf_4O}g5>&O*yr#$QKnjRN3s~V1H@$S&uu-_Fw;!5_xl~$$UOTGXQYI8J|zB zz&UZ)*#f*k!#Qf*0^^fs-=(kDjqq2OJsd+yNS=K@ycV9yGI>QZ+aO%|U?dUFAI_A< zc_v6GkX_}!i^Wpo`KimIyscjEe6p);;riVfW1O`%QN*y2L}SCEV4`2-!aY&QYM2i@ zCB2=GrxTtnkVtQ>(=3G9GBUF#bpaJ9*E? zBY`CDI1>+uO#MWMeTEWbUZhg4uY$biayoa%Fhd~0A~FuJ`i~#KgNMCtNP)_i7zPc} z^oB(6!D#P7#2eJj4IJCb3-A$eVQx>yA;xp2aL(Ee?nAZ_U=6O9jz% zva@Rm_rP>4i%vsr1jxKOkLRBSfk|2=^p&yd66i>6$$7mCI`Y9~0dYr;nFjJfD5=Jw z-4QV;3~yjEY8k~s-|D{d#8qKco2=B^;bdGFkx|IuHGHb;t&FC_Zc5!*4@0@}hD+V* zZQeoO2d32)qB0S+0*no9&-;clRvl4HybNNVpf#urqyEAB9dN-y-s~!Ipq>xaMi`(f zu+HqS6M-4z40JBT0kuV19#t{{nFqZVDFO|MLU?UiCg5dR!robt+2*||Vf zwG-O=rt8MM<>RuaAvg84Ox^>Y*BVWgJI&K+_&dzlTv{GjkgXzpFECeX_Co#tt$6=) zVgR+oU!5c{2W-~a_ZCbdk+5pfj^SGW7d=J^rLU>n%(?Kp@2=)a)wI-T=;ZVeES+|6Z$ex#6a_VhV9=sy8e-H zKm!Q06^QOR@e8W!SPh(y@^}FW_No4JMEfg<4}N%6+{^(w^!h770!PymHF*JwlfpfT z|AWNpFXi{o(eD4-(t$#-DrIP0?YG?jORKp92P- z?^ikjY%ZLv_@4K1EhZdO)@Op25)7E$(^w2h0H%iw5D>ZN@1SDuj(opaI>_6C}ym_V=riC#V ze+X`NucqHUbruZ)_#E}-22t>Mpp(&9oDgnrV72J94}}Lovy+$7oW1h@=Ggv4S2jIC zB;rkRcW9plQ=huxG zLLUzQOQ3&K;s})(cNTCZk^V~Xi2%aBvkWd@uVg~?WK&bvZ-2YF`^hsvV?B)k!E&Q36{@ z;5q4Zzfr(LSBwiTGx@J^vVU$m{;^P6JOzEfL)aRKRg^|Bj|G>s0}0DsgV39FL?LF$ zrbuwBQsAmoADq}DFb%7b^>3$PftvnHwuFx!76+<7$h_JT@;Lgds0STW;7EqaHjQR& z)S3M+=Te2N_mu2Os*Q3jW6HDgGeigpM9`t==6=3?0UU4}4-IrMf9KiREOEaa_&)|L z|6TcHA{sA7CNZ$Ih1vBQ%i;detG9xm&~otv`*u2%SG7cB1n7?m^S?U!s1)^&F%H{j zA$H_9-bgreR`)#I;=t`awqS6E2qVVd 
zkf6r+JEhp!h2|J|P?ZLk2>hOS`;(3Rzl;=87^g2{<4ld{uH_7Pn$BCK#xrZ~C$I)0 zE|28h_CL{H*jtP>&&u)R+@=_f6qSG6aJD}-fYr$K$Y^O9|2)}K)E3!Z$ z*Eummgr5jTWsC)WU`}+%(2<5hpc;w@Q1O(HsD2+END1@&L&9(XbQe~J`+Iv3O+O`T z6_OZ0l<=z*L6e%d?)Ylr8Obp!kfoI5KU4aF&KxOh-EuorOOS)y{M;$U@3f2w&$Sv{ zrWNS1Q*UPpqHU5!%dsUH&@Si~t{`k@_nLh-{o9}3NPEHlpvIxG>&PiwHoNOh;mDBD%SHDXeGWJ-^bs{XaXNTqa=zZwoFD>t`HB!Kczi@40pjPW$iJVX{+DLX zBLD}z+mcsVDdE{R>irW3!!dIDW(0cbn5-hE9Re_c_%^#dqccLS^n-k<%x4U6`*(ez z10g!Y9C}bpA-3WJUHn0Cdmy}`BsHrq5QBM$5V*&Zal9Yz%^P5J9H1@q*GBHJhg?+y z3%ZDbSTgfPm1A90_+l9}F1=y~9}#hLEPF#BEV`7Ja6E%@I>?a8u#VR}88)xySmzf` z=rj`p-5`-|k8(~^Nq?|)9(2Ih=5DkQ;?4kXO%b3&paytWR+G_V85F>mhvN9^hX1~< zvq$rn%l<2zvVD!|d}u(z)Cv)(nsiu>`H+>sWfb!Pf{q79TO)c|6qbbP1Xuef4bHXB zMov`g!L)C}h>nK?#%Tx2C)%7Qzx zoG`H25uWIG^mKLFyi{{6%~Z$Q>!KU_@@@UnuiWT!w!;1A&Yfu$6!TbT7daD+-wIkd z&e_L(ga>kUqMay)3zMTH7m{2v(N!fBI$=chf(fBrJ^+<$?*Rk9AKHbf55?t>zYm5N zRG@KP)>0Y;E?Fec1kC%jS>~2$%x?6f_i3AbRtUojyAF3^$qj2RBGQzMA%uI9*A<5^ z+3N?oKYNgx_DT$W(UTg-Ak%a<9(%Z>24OPykD$H8?7pr(%d65(+3OtptljhdNmu+> zpo^-A0wu<~`hcgv{Hx)4xrXio979TO7gqeMZ1nQ)eDL-)_Vnz7@u`#jtFkkWou(20 z)bF_zpg*>4vocH6xw$oI(=VZ5F2LkTo_4pH(!9Fu6Y;Ft<>ho=Zn|zt!!w&BZ7|W( zw)$1mIX^>ty1X>|(s6*nxzI^w7W3?>N!Rcqk25=oHho$M{NiktNB&!&dpV}eapusP z;qTzFV-Z`}uG$0jSCeGGX#Op2>=%TO!(x_MUhp#6+!Wy^wyxRF4UJ*=n%B+IyIK-c8;)wQARPB-?bd((J7Zq9xNh4U_H^k<|R#Ak=_5&I_-rv z(YES%KQEQIeMjxa>+3ASyj>b1Ny;Hb2$i|}YBo|?W`C18%RnnUr*2#$D}#zGn*UGf zef(#&YgFMe)wiEqxipoUQDm(=Ntw>4q6l3+u8XA+5^^<~#^ZY>+N0K-DMvLbcf*77 zcku)Legpt~KM6+jD?z!974+eB(EE)lTmBaKI;_Lpp5{ij@^d|wMs;r3LezC`z7gt8 zbA@e*6Mg)y5nt=r(o&m8SK2P6rd{bp%%t{Lxvl<$j$ zgNm6s@7mZr%xw1J@DT$dU<9)UkaT-(7Bg&gKEF*?tEL<_^?Z~=8%duO$V-GSV^U%o z2D+BYSUn$}0?hG0WXJvTdXpcq>n@b>6O*t-1NM{2y0Zgsm!`a~B}nevX%x;0ET-rb zy$Qdf5?8hQ>@jj#)}sWYUUhqz5twVa81MlqiLR^QZj;YP9C^*%-K> zIQ1GF$1i4SG1EU+%>N&ZmO|)LyEu8+rTDo!luQfD$00@O8SMNq5f=~7*iu%@a2Ia; zR^5gt8kxI!TfsIw1Ra$WG1yx zFil&ep?v95z`!9&zdvvFG$iQlgi%W}pkLsd?0v8T!d_2$b~b+n zwkn&}=0;}fd@OgUB8lca<3x5}oau*GUQgedVRoZprwF!&I{PM1`4lOFtE&%&fF#bV 
z{(kS4mPh;Gc0{U_v=bxFR&S%5?aE3u;Pk*tpY%(E_Q(4k$n!k%c>?v#;PyW;ZfF)H z+2Jh)Z3Rt2{(enSZT+!rd|uX3rd4OD2=x6vT23q#$1Wsf9_+WgOvO$n{vGPDy>j!rqvVPPHW^@?(P9^d4{m2v6hY(RwmWP=QF;k&U z{$ZQ+vC3Y}Nlg@8X!~7!Yc%6DDHBNs%U1i#LA^P*Iv?*G*CAFBe7)u)9g$la>iG(3)eI91pvCvbN~K7yZL##rY^*J;P(*T? zJmm(>_{&SWFDRgkMx>r04dd20Sieb6Lq6VjQQ&z2JEXH(d3wI!@dchJ!bzQdQS@+` zEFKMC?DoWsBhD$nNoFsUtF?e3H38dZCYI!IL*Q=JNw>*#Xm9$;BgN*-t5#G+MZ>(# z)2aBAtdr(#N~FOe7OTNV>3Z+$3qoSp_@P#&NY8J*Lxx1W%f84~QBIMbGS?9Jf_Wg9?q}(fV9jh~#&%&XYn5N%*pesh^ zc<oz~&=6_VY2q)W5xM33x=tU;fFZWbJ0&$;++DQrJiAj)tc^ zHp+n?yKIMmuUux_-XTLwW`t3(5tzm7XRUH-un217cnK7+dH>%h{Uh%=6R>V8;{aCp zon6+A%%Wtl!i~HJh>vL4?6)1eIsLtPd_)|Yv~u>_n>hazDbK zF4kSY$VzN1f8gT^=MOn~)+l)cM7_=B)a?qlv+J6yMa2XTf$#wkw@^fO+RrAimZl8c zew^~!f#k5#-x{~I1?TDFq@n3CRH=R5yoRS$F&9&MejBlS__ZG5-5^A}(yuLT$^3SK()(3N}>L?iH-pzb%_pLAZ!A>uw zC${egK)3}m#Kz=Y-T<7wIP$>Y!StNLu&4?*OorI7oI^FNz@4bu{AuT@YGvKin&fFh zUYV`X9+As1Ao9RFmZzXM3cZ~>dRba(N(?9>PeD)w?9Xmmq&(} z$0wT{SMc}~*G6>@sJuTpg3mzS%~~aQz=&_>IOzXo&-LKXY>l!${o}kf!3x%f#f-RN z+<3w9^Fsxro*#XLZTk2oW&<7sCA1RxTyR<5x~6?BHn6R0rFaJNaat!%o`K)SG=K|q zeb3xJ4!gY8&a-eE`bWk8yNLgl#Q*992sQs}9slbm{{Q7MN5xxRQ1E)ja-{SpZ1m#` z68MaC$y5%QVN0+;JcOtep*Mkec)&IycmhJvmQ0CiptpQhIQM3zD+oR(xynz3L9iyY z8xcK$J#_@1%_E@Dgnr!FXNfY3AIpu{+WmTNkGDqwad+Z+hz$k5n}zL56xsE8{E&cE z%Vz|e$-$_et)d9CTebY?Iq$-MjR6O6F->E5iBkR%bJre#S6ciEYSVMSLNLHelR^=# zmmG^BEW0Cez4tMOBQazba%jfu4&HCChHhI%g?&lH$`UIfAmd&SSM^$`{`UO`BKS8q zIR{Y?n;Cqoq;0?sY$!+N;lWNHU_MpI20QJmFfN1zJB1I9qXp>IDq4S32_F0%_?$2L zB0pLm&s0Rj$jFG;b^c?;y8ndU;s@ucM`PneC5xk9>|*0Wm{ zh+wZAz%#|Hj^Xl@wRcJ?k3q?Cyk4Q{4-HuWYr&r}p=&L?&PexeO&ayK$UrwI);BxigZln^dDidOhr`gNO z<&Q2bkRG$uOMEs+ zLU`>D=yUOj6^{zPcq8!rQHgv^TPP*3UO@Ej;m-SE)u!yC{6%(Qa;g!$il35{`nN)N ztxRkAbro>)c2&l@g=!!cx%9!5BsCfn}#97gWenBU&+ zFBPzSLGixb=Z}p=3Mb*9*y#j=w678X;+lR@r5N&JH3h=pwpiLVMqz+l{H&5snP3!& zqe1WRRfd?_*HIafVpI@_>fP^?%`rYJ)xARr)BhF}Pa5C;kh0&3Zn z5tb2|0W__G5E5K1DjGmkP)d*wWhG%4NZKkv1Oy2rK#)xaND?4{@71fV(UzazpYLjZ zo#Z<2Gw$bppK;E0PS{i`!8T$eml2UR1V2oUo-fJq)nbbHcQulag1;4!P1@&jx0`Uh 
z5SUU}5!RvC$1WVZ7JIiwHc9N>J3#QvzL1ss_)vGum@01_GzLTT@dhmReoeTp!sEN` zYVg-ah@q@Cu$0Gq;u_OxI|7L#s1p6M!91}&1IaTFZk!t@s2I4w@=n;G=+Ao~?n?Us zDG$pSnR$R=?E^RE2g=Gl@YA`bf!?M{=d1P!1M;qSVk>nW-htvE>vQVXwb-YaxJLDagmzJarxc9m_ z)ZeUs%=9$3L-m`=(p<1OfnbEg!9|9VF?uCp$g!#hs9niZ zb&V&(YPWF2v(S&cX)4EIOe`5&Ikm$Jz_ph{xmJvL$<3K(Txv|EcxPm__`T0!5Krgy zD}kc2Xj;jI)?vk5$Hj|(R);3Kf0y*`a5H%!mjyNUI2kI7LdnsStb1RN4*O{N5+_oJ zOmZ#RlouB@@Rez8N7QYE-PX&P9H=&yV)q2@jB2@GHAyG3FT2n)a!e&jq{I@7R64fi zy8q_BU5Q?q30=MSJKsc1!AwH`C@}67&)?c^V!bGKx?_(;zC;B6tfK_V~avr zJ137Y*=^CowDJ~H99iMk2Ds+a&TsQm$X5s7fbO ztCGpb#CI}S?c$tk*jy^jK2zJuGH4Tj0gv~olPn!jcb|0w6wtP{2hTTvc3=!D-c}Bq&pfXF(bEi zFwnQi5jks}&}+=9?IKAMLe-S`dSZ7J1jbQ=J;(TE5=s+2rIs0U5;(k9_K_>D^g4o zV0&Y`D`qR-dTPQfaq~rS2zQ%JmlALPTH9#TVdMQ_s0 zQ+j?!N=#s(DcpL;U`;ip#e{+oaZ0!U4t7p(%jITE9~hm~$3RL-V3<>;FlaY}v66uF z)aR|25!&%wC-@C6RMYDf=~-I5(pEvI#6}5!b=k77S`X~o<8A|LP}x{j8F&_BXOXOp zt!+VZvF*bNVY=woS}x8zw&&rnPS>;`Piyi?L`wg?;J_DgDb~1M;Rt6&AnorNpaio~ z`;H90VAXKXIZ2i*siMLm_3?!rSMzO72p#jHK)tBlm7t6sfTT!rShUTwWJ7!sBGM%a zyW&!^u;d0lH?xn z%MiIxjt+_uyP;j?LtoSr&+vZHLy1w#)fg`+e@KQF8Q79?>LzJ*8I+(qZ+|=kn3YbB zVWEt1**F23Kz0H~&^3r)>C2&J>Tgg{f}y zQVu)fW#W`6V!+?;25;vHR-;{aX~9@@dNteb%(!#!;hZtJ?&^8Odlc0EQKi?uQ8IVE zz4;@#xtXg=)mhUV-D8l{dl!aB@~w{x+ZDkZTz+iW+UNZJ_9KT|AdlB6BHY;_Du1iU zf0Ww1CV-skic|Q1DB2%SDRQzeI*Khg{zXCmvr;cZtAY!GC*{KvmeimDlAZ<5WAi^Y z20+fqZ945C=1<-&**hRWz3Q+&d1)yVL(0JJZ>s;PWAw;$MUX2Sy(@#q*GLQYL7~xU}5W=Fax5{xBs`2PZ2t z+F&vJ!N}JjOTR{jy_4oKSqJzXQQm5WbnC6L$+QYTX-{3jU5$eEP8g|n`V2)OMO7O% zYYg>Owv!B&HVV)4%B#knyU?6A{<7k-^ezbnTkrNODL3WB{rT{~-|c9YtbB9!Wueuc zArmikRLaQ3p~c#{upNQz2aSK&B8}@Gb>oxa(()m4?VztO0t1S)%~H#kp>Kd_Hx=Ie zeAOp!iVlJFlKpi2(=T5P5Qe~o3>S0h@JZ`(%?9|GhIUG${h6*~0D0xy0BH=Qb>*1? z`Cy=OSjxfsAo&>DH7z-*cmw%0_r23TQz~WQelYeQOUhU>xzzyqF$aiWFXiBs^?>Rk zWu-4*`OBUdARmm3noBvT0|K<4Z>TRF6lq;!Kz_U4q|ILfzc~1_aaanUe+lp}0sh}J bz=F0$ dict: + """ + Supplementary method converting a NumPy array of body landmark data into dictionaries. The array data must match the + order of the BODY_IDENTIFIERS list. 
+ """ + + output = {} + + for landmark_index, identifier in enumerate(BODY_IDENTIFIERS): + output[identifier] = data_array[:, landmark_index].tolist() + + return output + + +def __dictionary_to_numpy(landmarks_dict: dict) -> np.ndarray: + """ + Supplementary method converting dictionaries of body landmark data into respective NumPy arrays. The resulting array + will match the order of the BODY_IDENTIFIERS list. + """ + + output = np.empty(shape=(len(landmarks_dict["leftEar"]), len(BODY_IDENTIFIERS), 2)) + + for landmark_index, identifier in enumerate(BODY_IDENTIFIERS): + output[:, landmark_index, 0] = np.array(landmarks_dict[identifier])[:, 0] + output[:, landmark_index, 1] = np.array(landmarks_dict[identifier])[:, 1] + + return output + + +def __rotate(origin: tuple, point: tuple, angle: float): + """ + Rotates a point counterclockwise by a given angle around a given origin. + + :param origin: Landmark in the (X, Y) format of the origin from which to count angle of rotation + :param point: Landmark in the (X, Y) format to be rotated + :param angle: Angle under which the point shall be rotated + :return: New landmarks (coordinates) + """ + + ox, oy = origin + px, py = point + + qx = ox + math.cos(angle) * (px - ox) - math.sin(angle) * (py - oy) + qy = oy + math.sin(angle) * (px - ox) + math.cos(angle) * (py - oy) + + return qx, qy + + +def __preprocess_row_sign(sign: dict) -> (dict, dict): + """ + Supplementary method splitting the single-dictionary skeletal data into two dictionaries of body and hand landmarks + respectively. 
+ """ + + sign_eval = sign + + if "nose_X" in sign_eval: + body_landmarks = {identifier: [(x, y) for x, y in zip(sign_eval[identifier + "_X"], sign_eval[identifier + "_Y"])] + for identifier in BODY_IDENTIFIERS} + hand_landmarks = {identifier: [(x, y) for x, y in zip(sign_eval[identifier + "_X"], sign_eval[identifier + "_Y"])] + for identifier in HAND_IDENTIFIERS} + + else: + body_landmarks = {identifier: sign_eval[identifier] for identifier in BODY_IDENTIFIERS} + hand_landmarks = {identifier: sign_eval[identifier] for identifier in HAND_IDENTIFIERS} + + return body_landmarks, hand_landmarks + + +def __wrap_sign_into_row(body_identifiers: dict, hand_identifiers: dict) -> dict: + """ + Supplementary method for merging body and hand data into a single dictionary. + """ + + return {**body_identifiers, **hand_identifiers} + + +def augment_rotate(sign: dict, angle_range: tuple) -> dict: + """ + AUGMENTATION TECHNIQUE. All the joint coordinates in each frame are rotated by a random angle up to 13 degrees with + the center of rotation lying in the center of the frame, which is equal to [0.5; 0.5]. 
+ + :param sign: Dictionary with sequential skeletal data of the signing person + :param angle_range: Tuple containing the angle range (minimal and maximal angle in degrees) to randomly choose the + angle by which the landmarks will be rotated from + + :return: Dictionary with augmented (by rotation) sequential skeletal data of the signing person + """ + + body_landmarks, hand_landmarks = __preprocess_row_sign(sign) + angle = math.radians(random.uniform(*angle_range)) + + body_landmarks = {key: [__rotate((0.5, 0.5), frame, angle) for frame in value] for key, value in + body_landmarks.items()} + hand_landmarks = {key: [__rotate((0.5, 0.5), frame, angle) for frame in value] for key, value in + hand_landmarks.items()} + + return __wrap_sign_into_row(body_landmarks, hand_landmarks) + + +def augment_shear(sign: dict, type: str, squeeze_ratio: tuple) -> dict: + """ + AUGMENTATION TECHNIQUE. + + - Squeeze. All the frames are squeezed from both horizontal sides. Two different random proportions up to 15% of + the original frame's width for both left and right side are cut. + + - Perspective transformation. The joint coordinates are projected onto a new plane with a spatially defined + center of projection, which simulates recording the sign video with a slight tilt. Each time, the right or left + side, as well as the proportion by which both the width and height will be reduced, are chosen randomly. This + proportion is selected from a uniform distribution on the [0; 1) interval. Subsequently, the new plane is + delineated by reducing the width at the desired side and the respective vertical edge (height) at both of its + adjacent corners. + + :param sign: Dictionary with sequential skeletal data of the signing person + :param type: Type of shear augmentation to perform (either 'squeeze' or 'perspective') + :param squeeze_ratio: Tuple containing the relative range from what the proportion of the original width will be + randomly chosen. 
These proportions will either be cut from both sides or used to construct the + new projection + + :return: Dictionary with augmented (by squeezing or perspective transformation) sequential skeletal data of the + signing person + """ + + body_landmarks, hand_landmarks = __preprocess_row_sign(sign) + + if type == "squeeze": + move_left = random.uniform(*squeeze_ratio) + move_right = random.uniform(*squeeze_ratio) + + src = np.array(((0, 1), (1, 1), (0, 0), (1, 0)), dtype=np.float32) + dest = np.array(((0 + move_left, 1), (1 - move_right, 1), (0 + move_left, 0), (1 - move_right, 0)), + dtype=np.float32) + mtx = cv2.getPerspectiveTransform(src, dest) + + elif type == "perspective": + + move_ratio = random.uniform(*squeeze_ratio) + src = np.array(((0, 1), (1, 1), (0, 0), (1, 0)), dtype=np.float32) + + if __random_pass(0.5): + dest = np.array(((0 + move_ratio, 1 - move_ratio), (1, 1), (0 + move_ratio, 0 + move_ratio), (1, 0)), + dtype=np.float32) + else: + dest = np.array(((0, 1), (1 - move_ratio, 1 - move_ratio), (0, 0), (1 - move_ratio, 0 + move_ratio)), + dtype=np.float32) + + mtx = cv2.getPerspectiveTransform(src, dest) + + else: + + logging.error("Unsupported shear type provided.") + return {} + + landmarks_array = __dictionary_to_numpy(body_landmarks) + augmented_landmarks = cv2.perspectiveTransform(np.array(landmarks_array, dtype=np.float32), mtx) + + augmented_zero_landmark = cv2.perspectiveTransform(np.array([[[0, 0]]], dtype=np.float32), mtx)[0][0] + augmented_landmarks = np.stack([np.where(sub == augmented_zero_landmark, [0, 0], sub) for sub in augmented_landmarks]) + + body_landmarks = __numpy_to_dictionary(augmented_landmarks) + + return __wrap_sign_into_row(body_landmarks, hand_landmarks) + + +def augment_arm_joint_rotate(sign: dict, probability: float, angle_range: tuple) -> dict: + """ + AUGMENTATION TECHNIQUE. The joint coordinates of both arms are passed successively, and the impending landmark is + slightly rotated with respect to the current one. 
The chance of each joint to be rotated is 3:10 and the angle of + alternation is a uniform random angle up to +-4 degrees. This simulates slight, negligible variances in each + execution of a sign, which do not change its semantic meaning. + + :param sign: Dictionary with sequential skeletal data of the signing person + :param probability: Probability of each joint to be rotated (float from the range [0, 1]) + :param angle_range: Tuple containing the angle range (minimal and maximal angle in degrees) to randomly choose the + angle by which the landmarks will be rotated from + + :return: Dictionary with augmented (by arm joint rotation) sequential skeletal data of the signing person + """ + + body_landmarks, hand_landmarks = __preprocess_row_sign(sign) + + # Iterate over both directions (both hands) + for side in ["left", "right"]: + # Iterate gradually over the landmarks on arm + for landmark_index, landmark_origin in enumerate(ARM_IDENTIFIERS_ORDER): + landmark_origin = landmark_origin.replace("$side$", side) + + # End the process on the current hand if the landmark is not present + if landmark_origin not in body_landmarks: + break + + # Perform rotation by provided probability + if __random_pass(probability): + angle = math.radians(random.uniform(*angle_range)) + + for to_be_rotated in ARM_IDENTIFIERS_ORDER[landmark_index + 1:]: + to_be_rotated = to_be_rotated.replace("$side$", side) + + # Skip if the landmark is not present + if to_be_rotated not in body_landmarks: + continue + + body_landmarks[to_be_rotated] = [__rotate(body_landmarks[landmark_origin][frame_index], frame, + angle) + for frame_index, frame in enumerate(body_landmarks[to_be_rotated])] + + return __wrap_sign_into_row(body_landmarks, hand_landmarks) diff --git a/conversion_requirements.txt b/conversion_requirements.txt new file mode 100644 index 0000000..4d37abe --- /dev/null +++ b/conversion_requirements.txt @@ -0,0 +1,21 @@ +bokeh==2.4.3 +boto3>=1.9 +clearml==1.6.4 +ipywidgets==8.0.4 
+matplotlib==3.5.3 +mediapipe==0.8.11 +notebook==6.5.2 +opencv-python==4.6.0.66 +pandas==1.1.5 +pandas==1.1.5 +plotly==5.11.0 +scikit-learn==1.0.2 +torchvision==0.13.0 +tqdm==4.54.1 +# ------ +requests==2.28.1 +onnx==1.12.0 +onnx-tf==1.10.0 +onnxruntime==1.12.1 +tensorflow +tensorflow-probability diff --git a/convert.py b/convert.py new file mode 100644 index 0000000..c5d3932 --- /dev/null +++ b/convert.py @@ -0,0 +1,123 @@ +import os +import argparse +import json + +import numpy as np +import torch +import onnx +import onnxruntime + +try: + import tensorflow as tf +except ImportError: + print("Warning: Tensorflow not installed. This is required when exporting to tflite") + + +def to_numpy(tensor): + return tensor.detach().cpu().numpy() if tensor.requires_grad else tensor.cpu().numpy() + + +def print_final_message(model_path): + success_msg = f"\033[92mModel converted at {model_path} \033[0m" + try: + import requests + joke = json.loads(requests.request("GET", "https://api.chucknorris.io/jokes/random?category=dev").text)["value"] + print(f"{success_msg}\n\nNow go read a Chuck Norris joke:\n\033[1m{joke}\033[0m") + except ImportError: + print(success_msg) + + +def convert_tf_saved_model(onnx_model, output_folder): + from onnx_tf.backend import prepare + tf_rep = prepare(onnx_model) # prepare tf representation + tf_rep.export_graph(output_folder) # export the model + + +def convert_tf_to_lite(model_dir, output_path): + # Convert the model + converter = tf.lite.TFLiteConverter.from_saved_model(model_dir) # path to the SavedModel directory + # This is needed for TF Select ops: Cast, RealDiv + converter.target_spec.supported_ops = [ + tf.lite.OpsSet.TFLITE_BUILTINS, # enable TensorFlow Lite ops. + tf.lite.OpsSet.SELECT_TF_OPS # enable TensorFlow ops. + ] + + tflite_model = converter.convert() + + # Save the model. 
+ with open(output_path, 'wb') as f: + f.write(tflite_model) + + +def validate_tflite_output(model_path, input_data, output_array): + interpreter = tf.lite.Interpreter(model_path=model_path) + + output = interpreter.get_output_details()[0] # Model has single output. + input = interpreter.get_input_details()[0] # Model has single input. + interpreter.resize_tensor_input(input['index'], input_data.shape) + interpreter.allocate_tensors() + + input_data = tf.convert_to_tensor(input_data, np.float32) + interpreter.set_tensor(input['index'], input_data) + interpreter.invoke() + + out = interpreter.get_tensor(output['index']) + np.testing.assert_allclose(out, output_array, rtol=1e-03, atol=1e-05) + + +def convert(checkpoint_path, export_tensorflow): + output_folder = "converted_models" + model = torch.load(checkpoint_path, map_location='cpu') + model.eval() + + # Input to the model + x = torch.randn(1, 10, 54, 2, requires_grad=True) + numpy_x = to_numpy(x) + + torch_out = model(x) + numpy_out = to_numpy(torch_out) + + model_path = f"{output_folder}/spoter.onnx" + if not os.path.exists(output_folder): + os.makedirs(output_folder) + + # Export the model + torch.onnx.export(model, # model being run + x, # model input (or a tuple for multiple inputs) + model_path, # where to save the model (can be a file or file-like object) + export_params=True, # store the trained parameter weights inside the model file + opset_version=11, # the ONNX version to export the model to + do_constant_folding=True, # whether to execute constant folding for optimization + input_names=['input'], # the model's input names + output_names=['output'], + dynamic_axes={'input': [1]}) # the model's output names + + # Validate conversion + onnx_model = onnx.load(model_path) + onnx.checker.check_model(onnx_model) + + ort_session = onnxruntime.InferenceSession(model_path) + + # compute ONNX Runtime output prediction + ort_inputs = {ort_session.get_inputs()[0].name: to_numpy(x)} + ort_outs = 
ort_session.run(None, ort_inputs) + + # compare ONNX Runtime and PyTorch results + np.testing.assert_allclose(numpy_out, ort_outs[0], rtol=1e-03, atol=1e-05) + + if export_tensorflow: + saved_model_dir = f"{output_folder}/tf_saved" + tflite_model_path = f"{output_folder}/spoter.tflite" + convert_tf_saved_model(onnx_model, saved_model_dir) + convert_tf_to_lite(saved_model_dir, tflite_model_path) + validate_tflite_output(tflite_model_path, numpy_x, numpy_out) + + print_final_message(output_folder) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('-c', '--checkpoint_path', help='Checkpoint Path') + parser.add_argument('-tf', '--export_tensorflow', help='Export Tensorflow apart from ONNX', action='store_true') + args = parser.parse_args() + convert(args.checkpoint_path, args.export_tensorflow) diff --git a/datasets/__init__.py b/datasets/__init__.py new file mode 100644 index 0000000..e92e504 --- /dev/null +++ b/datasets/__init__.py @@ -0,0 +1,3 @@ +from .czech_slr_dataset import CzechSLRDataset +from .embedding_dataset import SLREmbeddingDataset +from .datasets_utils import collate_fn_triplet_padd, collate_fn_padd diff --git a/datasets/clearml_dataset_loader.py b/datasets/clearml_dataset_loader.py new file mode 100644 index 0000000..bf41e0b --- /dev/null +++ b/datasets/clearml_dataset_loader.py @@ -0,0 +1,8 @@ +from clearml import Dataset +from .dataset_loader import DatasetLoader + + +class ClearMLDatasetLoader(DatasetLoader): + + def get_dataset_folder(self, dataset_project, dataset_name): + return Dataset.get(dataset_project=dataset_project, dataset_name=dataset_name).get_local_copy() diff --git a/datasets/czech_slr_dataset.py b/datasets/czech_slr_dataset.py new file mode 100644 index 0000000..39bd97b --- /dev/null +++ b/datasets/czech_slr_dataset.py @@ -0,0 +1,72 @@ +import torch +import numpy as np +import torch.utils.data as torch_data + +from datasets.datasets_utils import load_dataset, tensor_to_dictionary, 
dictionary_to_tensor, \ + random_augmentation +from normalization.body_normalization import normalize_single_dict as normalize_single_body_dict +from normalization.hand_normalization import normalize_single_dict as normalize_single_hand_dict + + +class CzechSLRDataset(torch_data.Dataset): + """Advanced object representation of the HPOES dataset for loading hand joints landmarks utilizing the Torch's + built-in Dataset properties""" + + data: [np.ndarray] + labels: [np.ndarray] + + def __init__(self, dataset_filename: str, num_labels=5, transform=None, augmentations=False, + augmentations_prob=0.5, normalize=True): + """ + Initiates the HPOESDataset with the pre-loaded data from the h5 file. + + :param dataset_filename: Path to the h5 file + :param transform: Any data transformation to be applied (default: None) + """ + + loaded_data = load_dataset(dataset_filename) + data, labels = loaded_data[0], loaded_data[1] + + self.data = data + self.labels = labels + self.targets = list(labels) + self.num_labels = num_labels + self.transform = transform + + self.augmentations = augmentations + self.augmentations_prob = augmentations_prob + self.normalize = normalize + + def __getitem__(self, idx): + """ + Allocates, potentially transforms and returns the item at the desired index. 
+ + :param idx: Index of the item + :return: Tuple containing both the depth map and the label + """ + + depth_map = torch.from_numpy(np.copy(self.data[idx])) + # label = torch.Tensor([self.labels[idx] - 1]) + label = torch.Tensor([self.labels[idx]]) + + depth_map = tensor_to_dictionary(depth_map) + + # Apply potential augmentations + depth_map = random_augmentation(self.augmentations, self.augmentations_prob, depth_map) + + if self.normalize: + depth_map = normalize_single_body_dict(depth_map) + depth_map = normalize_single_hand_dict(depth_map) + + depth_map = dictionary_to_tensor(depth_map) + + # Move the landmark position interval to improve performance + depth_map = depth_map - 0.5 + + if self.transform: + depth_map = self.transform(depth_map) + + return depth_map, label + + def __len__(self): + return len(self.labels) diff --git a/datasets/dataset_loader.py b/datasets/dataset_loader.py new file mode 100644 index 0000000..dd44082 --- /dev/null +++ b/datasets/dataset_loader.py @@ -0,0 +1,17 @@ + +import os + + +class DatasetLoader(): + """Abstract class that serves to load datasets from different sources (local, ClearML, other tracker) + """ + + def get_dataset_folder(self, dataset_project, dataset_name): + return NotImplementedError() + + +class LocalDatasetLoader(DatasetLoader): + + def get_dataset_folder(self, dataset_project, dataset_name): + base_folder = os.environ.get("BASE_DATA_FOLDER", "data") + return os.path.join(base_folder, dataset_name) diff --git a/datasets/datasets_utils.py b/datasets/datasets_utils.py new file mode 100644 index 0000000..e031805 --- /dev/null +++ b/datasets/datasets_utils.py @@ -0,0 +1,133 @@ +import pandas as pd +import ast +import torch +import random +import numpy as np +from torch.nn.utils.rnn import pad_sequence +from random import randrange + +from augmentations import augment_arm_joint_rotate, augment_rotate, augment_shear +from normalization.body_normalization import BODY_IDENTIFIERS +from augmentations.augment import 
HAND_IDENTIFIERS + + +def load_dataset(file_location: str): + + # Load the datset csv file + df = pd.read_csv(file_location, encoding="utf-8") + df.columns = [item.replace("_left_", "_0_").replace("_right_", "_1_") for item in list(df.columns)] + + # TEMP + labels = df["labels"].to_list() + + data = [] + + for row_index, row in df.iterrows(): + current_row = np.empty(shape=(len(ast.literal_eval(row["leftEar_X"])), + len(BODY_IDENTIFIERS + HAND_IDENTIFIERS), + 2) + ) + for index, identifier in enumerate(BODY_IDENTIFIERS + HAND_IDENTIFIERS): + current_row[:, index, 0] = ast.literal_eval(row[identifier + "_X"]) + current_row[:, index, 1] = ast.literal_eval(row[identifier + "_Y"]) + + data.append(current_row) + + return data, labels + + +def tensor_to_dictionary(landmarks_tensor: torch.Tensor) -> dict: + + data_array = landmarks_tensor.numpy() + output = {} + + for landmark_index, identifier in enumerate(BODY_IDENTIFIERS + HAND_IDENTIFIERS): + output[identifier] = data_array[:, landmark_index] + + return output + + +def dictionary_to_tensor(landmarks_dict: dict) -> torch.Tensor: + + output = np.empty(shape=(len(landmarks_dict["leftEar"]), len(BODY_IDENTIFIERS + HAND_IDENTIFIERS), 2)) + + for landmark_index, identifier in enumerate(BODY_IDENTIFIERS + HAND_IDENTIFIERS): + output[:, landmark_index, 0] = [frame[0] for frame in landmarks_dict[identifier]] + output[:, landmark_index, 1] = [frame[1] for frame in landmarks_dict[identifier]] + + return torch.from_numpy(output) + + +def random_augmentation(augmentations, augmentations_prob, depth_map): + if augmentations and random.random() < augmentations_prob: + selected_aug = randrange(4) + if selected_aug == 0: + depth_map = augment_arm_joint_rotate(depth_map, 0.3, (-4, 4)) + elif selected_aug == 1: + depth_map = augment_shear(depth_map, "perspective", (0, 0.1)) + elif selected_aug == 2: + depth_map = augment_shear(depth_map, "squeeze", (0, 0.15)) + elif selected_aug == 3: + depth_map = augment_rotate(depth_map, (-13, 13)) + 
+ return depth_map + + +def collate_fn_triplet_padd(batch): + ''' + Padds batch of variable length + + note: it converts things ToTensor manually here since the ToTensor transform + assume it takes in images rather than arbitrary tensors. + ''' + # batch: list of length batch_size, each element contains ouput of dataset + # MASKING + anchor_lengths = [element[0].shape[0] for element in batch] + max_anchor_l = max(anchor_lengths) + positive_lengths = [element[1].shape[0] for element in batch] + max_positive_l = max(positive_lengths) + negative_lengths = [element[2].shape[0] for element in batch] + max_negative_l = max(negative_lengths) + + anchor_mask = [[False] * anchor_lengths[n] + [True] * (max_anchor_l - anchor_lengths[n]) + for n in range(len(batch))] + positive_mask = [[False] * positive_lengths[n] + [True] * (max_positive_l - positive_lengths[n]) + for n in range(len(batch))] + negative_mask = [[False] * negative_lengths[n] + [True] * (max_negative_l - negative_lengths[n]) + for n in range(len(batch))] + + # PADDING + anchor_batch = [element[0] for element in batch] + positive_batch = [element[1] for element in batch] + negative_batch = [element[2] for element in batch] + + anchor_batch = pad_sequence(anchor_batch, batch_first=True) + positive_batch = pad_sequence(positive_batch, batch_first=True) + negative_batch = pad_sequence(negative_batch, batch_first=True) + + return anchor_batch, positive_batch, negative_batch, \ + torch.Tensor(anchor_mask), torch.Tensor(positive_mask), torch.Tensor(negative_mask) + + +def collate_fn_padd(batch): + ''' + Padds batch of variable length + + note: it converts things ToTensor manually here since the ToTensor transform + assume it takes in images rather than arbitrary tensors. 
+ ''' + # batch: list of length batch_size, each element contains ouput of dataset + # MASKING + anchor_lengths = [element[0].shape[0] for element in batch] + max_anchor_l = max(anchor_lengths) + + anchor_mask = [[False] * anchor_lengths[n] + [True] * (max_anchor_l - anchor_lengths[n]) + for n in range(len(batch))] + + # PADDING + anchor_batch = [element[0] for element in batch] + anchor_batch = pad_sequence(anchor_batch, batch_first=True) + + labels = torch.Tensor([element[1] for element in batch]) + + return anchor_batch, labels, torch.Tensor(anchor_mask) diff --git a/datasets/embedding_dataset.py b/datasets/embedding_dataset.py new file mode 100644 index 0000000..a6a093b --- /dev/null +++ b/datasets/embedding_dataset.py @@ -0,0 +1,103 @@ +import torch +import torch.utils.data as torch_data +from random import sample +from typing import List +import numpy as np + +from datasets.datasets_utils import load_dataset, tensor_to_dictionary, dictionary_to_tensor, \ + random_augmentation +from normalization.body_normalization import normalize_single_dict as normalize_single_body_dict +from normalization.hand_normalization import normalize_single_dict as normalize_single_hand_dict + + +class SLREmbeddingDataset(torch_data.Dataset): + """Advanced object representation of the WLASL dataset for loading triplet used in triplet loss utilizing the + Torch's built-in Dataset properties""" + + data: List[np.ndarray] + labels: List[np.ndarray] + + def __init__(self, dataset_filename: str, triplet=True, transform=None, augmentations=False, + augmentations_prob=0.5, normalize=True): + """ + Initiates the HPOESDataset with the pre-loaded data from the h5 file. 
+ + :param dataset_filename: Path to the h5 file + :param transform: Any data transformation to be applied (default: None) + """ + + loaded_data = load_dataset(dataset_filename) + data, labels = loaded_data[0], loaded_data[1] + + self.data = data + self.labels = labels + self.targets = list(labels) + self.transform = transform + self.triplet = triplet + self.augmentations = augmentations + self.augmentations_prob = augmentations_prob + self.normalize = normalize + + def __getitem__(self, idx): + """ + Allocates, potentially transforms and returns the item at the desired index. + + :param idx: Index of the item + :return: Tuple containing both the depth map and the label + """ + depth_map_a = torch.from_numpy(np.copy(self.data[idx])) + label = torch.Tensor([self.labels[idx]]) + + depth_map_a = tensor_to_dictionary(depth_map_a) + + if self.triplet: + positive_indexes = list(np.where(np.array(self.labels) == self.labels[idx])[0]) + positive_index_sample = sample(positive_indexes, 2) + positive_index = positive_index_sample[0] if positive_index_sample[0] != idx else positive_index_sample[1] + negative_indexes = list(np.where(np.array(self.labels) != self.labels[idx])[0]) + negative_index = sample(negative_indexes, 1)[0] + # TODO: implement hard triplets + + depth_map_p = torch.from_numpy(np.copy(self.data[positive_index])) + depth_map_n = torch.from_numpy(np.copy(self.data[negative_index])) + + depth_map_p = tensor_to_dictionary(depth_map_p) + depth_map_n = tensor_to_dictionary(depth_map_n) + + # TODO: Add Data augmentation to positive and negative ? 
+ + # Apply potential augmentations + depth_map_a = random_augmentation(self.augmentations, self.augmentations_prob, depth_map_a) + + if self.normalize: + depth_map_a = normalize_single_body_dict(depth_map_a) + depth_map_a = normalize_single_hand_dict(depth_map_a) + if self.triplet: + depth_map_p = normalize_single_body_dict(depth_map_p) + depth_map_p = normalize_single_hand_dict(depth_map_p) + depth_map_n = normalize_single_body_dict(depth_map_n) + depth_map_n = normalize_single_hand_dict(depth_map_n) + + depth_map_a = dictionary_to_tensor(depth_map_a) + # Move the landmark position interval to improve performance + depth_map_a = depth_map_a - 0.5 + + if self.triplet: + depth_map_p = dictionary_to_tensor(depth_map_p) + depth_map_p = depth_map_p - 0.5 + depth_map_n = dictionary_to_tensor(depth_map_n) + depth_map_n = depth_map_n - 0.5 + + if self.transform: + depth_map_a = self.transform(depth_map_a) + if self.triplet: + depth_map_p = self.transform(depth_map_p) + depth_map_n = self.transform(depth_map_n) + + if self.triplet: + return depth_map_a, depth_map_p, depth_map_n + + return depth_map_a, label + + def __len__(self): + return len(self.labels) diff --git a/models/__init__.py b/models/__init__.py new file mode 100644 index 0000000..ef94e51 --- /dev/null +++ b/models/__init__.py @@ -0,0 +1,4 @@ +from .spoter_model import SPOTER +from .spoter_embedding_model import SPOTER_EMBEDDINGS +from .utils import train_epoch, evaluate, evaluate_top_k, train_epoch_embedding, train_epoch_embedding_online, \ + evaluate_embedding, embeddings_scatter_plot, embeddings_scatter_plot_splits diff --git a/models/spoter_embedding_model.py b/models/spoter_embedding_model.py new file mode 100644 index 0000000..2d2a944 --- /dev/null +++ b/models/spoter_embedding_model.py @@ -0,0 +1,41 @@ +import torch +import torch.nn as nn + +from models.spoter_model import _get_clones, SPOTERTransformerDecoderLayer + + +class SPOTER_EMBEDDINGS(nn.Module): + """ + Implementation of the SPOTER (Sign 
POse-based TransformER) architecture for sign language recognition from sequence + of skeletal data. + """ + + def __init__(self, features, hidden_dim=108, nhead=9, num_encoder_layers=6, num_decoder_layers=6, + norm_emb=False, dropout=0.1): + super().__init__() + + self.pos_encoding = nn.Parameter(torch.rand(1, 1, hidden_dim)) # init positional encoding + self.class_query = nn.Parameter(torch.rand(1, 1, hidden_dim)) + self.transformer = nn.Transformer(hidden_dim, nhead, num_encoder_layers, num_decoder_layers, dropout=dropout) + self.linear_embed = nn.Linear(hidden_dim, features) + + # Deactivate the initial attention decoder mechanism + custom_decoder_layer = SPOTERTransformerDecoderLayer(self.transformer.d_model, self.transformer.nhead, 2048, + dropout, "relu") + self.transformer.decoder.layers = _get_clones(custom_decoder_layer, self.transformer.decoder.num_layers) + self.norm_emb = norm_emb + + def forward(self, inputs, src_masks=None): + + h = torch.transpose(inputs.flatten(start_dim=2), 1, 0).float() + h = self.transformer( + self.pos_encoding.repeat(1, h.shape[1], 1) + h, + self.class_query.repeat(1, h.shape[1], 1), + src_key_padding_mask=src_masks + ).transpose(0, 1) + embedding = self.linear_embed(h) + + if self.norm_emb: + embedding = nn.functional.normalize(embedding, dim=2) + + return embedding diff --git a/models/spoter_model.py b/models/spoter_model.py new file mode 100644 index 0000000..ed253ca --- /dev/null +++ b/models/spoter_model.py @@ -0,0 +1,66 @@ + +import copy +import torch + +import torch.nn as nn +from typing import Optional + + +def _get_clones(mod, n): + return nn.ModuleList([copy.deepcopy(mod) for _ in range(n)]) + + +class SPOTERTransformerDecoderLayer(nn.TransformerDecoderLayer): + """ + Edited TransformerDecoderLayer implementation omitting the redundant self-attention operation as opposed to the + standard implementation. 
+ """ + + def __init__(self, d_model, nhead, dim_feedforward, dropout, activation): + super(SPOTERTransformerDecoderLayer, self).__init__(d_model, nhead, dim_feedforward, dropout, activation) + + del self.self_attn + + def forward(self, tgt: torch.Tensor, memory: torch.Tensor, tgt_mask: Optional[torch.Tensor] = None, + memory_mask: Optional[torch.Tensor] = None, tgt_key_padding_mask: Optional[torch.Tensor] = None, + memory_key_padding_mask: Optional[torch.Tensor] = None) -> torch.Tensor: + + tgt = tgt + self.dropout1(tgt) + tgt = self.norm1(tgt) + tgt2 = self.multihead_attn(tgt, memory, memory, attn_mask=memory_mask, + key_padding_mask=memory_key_padding_mask)[0] + tgt = tgt + self.dropout2(tgt2) + tgt = self.norm2(tgt) + tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt)))) + tgt = tgt + self.dropout3(tgt2) + tgt = self.norm3(tgt) + + return tgt + + +class SPOTER(nn.Module): + """ + Implementation of the SPOTER (Sign POse-based TransformER) architecture for sign language recognition from sequence + of skeletal data. 
+ """ + + def __init__(self, num_classes, hidden_dim=55): + super().__init__() + + self.row_embed = nn.Parameter(torch.rand(50, hidden_dim)) + self.pos = nn.Parameter(torch.cat([self.row_embed[0].unsqueeze(0).repeat(1, 1, 1)], dim=-1).flatten(0, 1).unsqueeze(0)) + self.class_query = nn.Parameter(torch.rand(1, hidden_dim)) + self.transformer = nn.Transformer(hidden_dim, 9, 6, 6) + self.linear_class = nn.Linear(hidden_dim, num_classes) + + # Deactivate the initial attention decoder mechanism + custom_decoder_layer = SPOTERTransformerDecoderLayer(self.transformer.d_model, self.transformer.nhead, 2048, + 0.1, "relu") + self.transformer.decoder.layers = _get_clones(custom_decoder_layer, self.transformer.decoder.num_layers) + + def forward(self, inputs): + h = torch.unsqueeze(inputs.flatten(start_dim=1), 1).float() + h = self.transformer(self.pos + h, self.class_query.unsqueeze(0)).transpose(0, 1) + res = self.linear_class(h) + + return res diff --git a/models/utils.py b/models/utils.py new file mode 100644 index 0000000..4979c79 --- /dev/null +++ b/models/utils.py @@ -0,0 +1,280 @@ +import numpy as np +import torch +from sklearn.metrics import silhouette_score +from sklearn.manifold import TSNE +from training.batch_sorter import sort_batches +from utils import get_logger + + +def train_epoch(model, dataloader, criterion, optimizer, device, scheduler=None): + + pred_correct, pred_all = 0, 0 + running_loss = 0.0 + model.train(True) + for i, data in enumerate(dataloader): + inputs, labels = data + inputs = inputs.squeeze(0).to(device) + labels = labels.to(device, dtype=torch.long) + + optimizer.zero_grad() + outputs = model(inputs).expand(1, -1, -1) + loss = criterion(outputs[0], labels[0]) + loss.backward() + optimizer.step() + running_loss += loss.item() + + # Statistics + if int(torch.argmax(torch.nn.functional.softmax(outputs, dim=2))) == int(labels[0][0]): + pred_correct += 1 + pred_all += 1 + + epoch_loss = running_loss / len(dataloader) + model.train(False) + if 
scheduler: + scheduler.step(epoch_loss) + + return epoch_loss, pred_correct, pred_all, (pred_correct / pred_all) + + +def train_epoch_embedding(model, epoch_iters, train_loader, val_loader, criterion, optimizer, device, scheduler=None): + + running_loss = [] + model.train(True) + for i, (anchor, positive, negative, a_mask, p_mask, n_mask) in enumerate(train_loader): + optimizer.zero_grad() + + anchor_emb = model(anchor.to(device), a_mask.to(device)) + positive_emb = model(positive.to(device), p_mask.to(device)) + negative_emb = model(negative.to(device), n_mask.to(device)) + + loss = criterion(anchor_emb.to(device), positive_emb.to(device), negative_emb.to(device)) + loss.backward() + optimizer.step() + running_loss.append(loss.item()) + + if i == epoch_iters: + break + + epoch_loss = np.mean(running_loss) + + # VALIDATION + model.train(False) + val_silhouette_coef = evaluate_embedding(model, val_loader, device) + + if scheduler: + scheduler.step(val_silhouette_coef) + + return epoch_loss, val_silhouette_coef + + +def train_epoch_embedding_online(model, epoch_iters, train_loader, val_loader, criterion, optimizer, device, + scheduler=None, enable_batch_sorting=False, mini_batch_size=None, + pre_batch_mining_count=1, batching_scheduler=None): + + running_loss = [] + iter_used_triplets = [] + iter_valid_triplets = [] + iter_pct_used = [] + model.train(True) + mini_batch = mini_batch_size or train_loader.batch_size + for i, (inputs, labels, masks) in enumerate(train_loader): + labels_size = labels.size()[0] + batch_loop_count = int(labels_size / mini_batch) + if batch_loop_count == 0: + continue + # Second condition is added so that we only run batch sorting if we have a full batch + if enable_batch_sorting: + if labels_size < train_loader.batch_size: + trim_count = labels_size % mini_batch + inputs = inputs[:-trim_count] + labels = labels[:-trim_count] + masks = masks[:-trim_count] + embeddings = None + with torch.no_grad(): + for j in range(batch_loop_count): + 
batch_embed = compute_batched_embeddings(model, device, inputs, masks, mini_batch, j) + if embeddings is None: + embeddings = batch_embed + else: + embeddings = torch.cat([embeddings, batch_embed], dim=0) + inputs, labels, masks = sort_batches(inputs, labels, masks, embeddings, device, + mini_batch_size=mini_batch_size, scheduler=batching_scheduler) + del embeddings + del batch_embed + mining_loop_count = pre_batch_mining_count + else: + mining_loop_count = 1 + for k in range(mining_loop_count): + for j in range(batch_loop_count): + optimizer.zero_grad(set_to_none=True) + batch_labels = labels[mini_batch * j:mini_batch * (j + 1)] + if batch_labels.size()[0] == 0: + break + embeddings = compute_batched_embeddings(model, device, inputs, masks, mini_batch, j) + loss, valid_triplets, used_triplets = criterion(embeddings, batch_labels) + + loss.backward() + optimizer.step() + running_loss.append(loss.item()) + if valid_triplets > 0: + iter_used_triplets.append(used_triplets) + iter_valid_triplets.append(valid_triplets) + iter_pct_used.append((used_triplets * 100) / valid_triplets) + + if epoch_iters > 0 and i * batch_loop_count * pre_batch_mining_count >= epoch_iters: + print("Breaking out because of epoch_iters filter") + break + + epoch_loss = np.mean(running_loss) + mean_used_triplets = np.mean(iter_used_triplets) + triplets_stats = { + "valid_triplets": np.mean(iter_valid_triplets), + "used_triplets": mean_used_triplets, + "pct_used": np.mean(iter_pct_used) + } + + if batching_scheduler: + batching_scheduler.step(mean_used_triplets) + + # VALIDATION + model.train(False) + with torch.no_grad(): + val_silhouette_coef = evaluate_embedding(model, val_loader, device) + + if scheduler: + scheduler.step(val_silhouette_coef) + + return epoch_loss, val_silhouette_coef, triplets_stats + + +def compute_batched_embeddings(model, device, inputs, masks, mini_batch, iteration): + batch_inputs = inputs[mini_batch * iteration:mini_batch * (iteration + 1)] + batch_masks = 
masks[mini_batch * iteration:mini_batch * (iteration + 1)] + + return model(batch_inputs.to(device), batch_masks.to(device)).squeeze(1) + + +def evaluate(model, dataloader, device, print_stats=False): + + logger = get_logger(__name__) + + pred_correct, pred_all = 0, 0 + stats = {i: [0, 0] for i in range(101)} + + for i, data in enumerate(dataloader): + inputs, labels = data + inputs = inputs.squeeze(0).to(device) + labels = labels.to(device, dtype=torch.long) + + outputs = model(inputs).expand(1, -1, -1) + + # Statistics + if int(torch.argmax(torch.nn.functional.softmax(outputs, dim=2))) == int(labels[0][0]): + stats[int(labels[0][0])][0] += 1 + pred_correct += 1 + + stats[int(labels[0][0])][1] += 1 + pred_all += 1 + + if print_stats: + stats = {key: value[0] / value[1] for key, value in stats.items() if value[1] != 0} + print("Label accuracies statistics:") + print(str(stats) + "\n") + logger.info("Label accuracies statistics:") + logger.info(str(stats) + "\n") + + return pred_correct, pred_all, (pred_correct / pred_all) + + +def evaluate_embedding(model, dataloader, device): + val_embeddings = [] + labels_emb = [] + + for i, (inputs, labels, masks) in enumerate(dataloader): + inputs = inputs.to(device) + masks = masks.to(device) + + outputs = model(inputs, masks) + for n in range(outputs.shape[0]): + val_embeddings.append(outputs[n, 0].cpu().detach().numpy()) + labels_emb.append(labels.detach().numpy()[n]) + + silhouette_coefficient = silhouette_score( + X=np.array(val_embeddings), + labels=np.array(labels_emb).reshape(len(labels_emb)) + ) + + return silhouette_coefficient + + +def embeddings_scatter_plot(model, dataloader, device, id_to_label, perplexity=40, n_iter=1000): + + val_embeddings = [] + labels_emb = [] + + with torch.no_grad(): + for i, (inputs, labels, masks) in enumerate(dataloader): + inputs = inputs.to(device) + masks = masks.to(device) + + outputs = model(inputs, masks) + for n in range(outputs.shape[0]): + val_embeddings.append(outputs[n, 
0].cpu().detach().numpy()) + labels_emb.append(id_to_label[int(labels.detach().numpy()[n])]) + + tsne = TSNE(n_components=2, verbose=0, perplexity=perplexity, n_iter=n_iter) + tsne_results = tsne.fit_transform(np.array(val_embeddings)) + + return tsne_results, labels_emb + + +def embeddings_scatter_plot_splits(model, dataloaders, device, id_to_label, perplexity=40, n_iter=1000): + + labels_split = {} + embeddings_split = {} + splits = list(dataloaders.keys()) + with torch.no_grad(): + for split, dataloader in dataloaders.items(): + labels_str = [] + embeddings = [] + for i, (inputs, labels, masks) in enumerate(dataloader): + inputs = inputs.to(device) + masks = masks.to(device) + + outputs = model(inputs, masks) + for n in range(outputs.shape[0]): + embeddings.append(outputs[n, 0].cpu().detach().numpy()) + labels_str.append(id_to_label[int(labels.detach().numpy()[n])]) + labels_split[split] = labels_str + embeddings_split[split] = embeddings + + tsne = TSNE(n_components=2, verbose=0, perplexity=perplexity, n_iter=n_iter) + all_embeddings = np.vstack([embeddings_split[split] for split in splits]) + tsne_results = tsne.fit_transform(all_embeddings) + tsne_results_dict = {} + curr_index = 0 + for split in splits: + len_embeddings = len(embeddings_split[split]) + tsne_results_dict[split] = tsne_results[curr_index: curr_index + len_embeddings] + curr_index += len_embeddings + + return tsne_results_dict, labels_split + + +def evaluate_top_k(model, dataloader, device, k=5): + + pred_correct, pred_all = 0, 0 + + for i, data in enumerate(dataloader): + inputs, labels = data + inputs = inputs.squeeze(0).to(device) + labels = labels.to(device, dtype=torch.long) + + outputs = model(inputs).expand(1, -1, -1) + + if int(labels[0][0]) in torch.topk(outputs, k).indices.tolist()[0][0]: + pred_correct += 1 + + pred_all += 1 + + return pred_correct, pred_all, (pred_correct / pred_all) diff --git a/normalization/blazepose_mapping.py b/normalization/blazepose_mapping.py new file mode 
_BODY_KEYPOINT_MAPPING = {
    "nose": "nose",
    "left_eye": "leftEye",
    "right_eye": "rightEye",
    "left_ear": "leftEar",
    "right_ear": "rightEar",
    "left_shoulder": "leftShoulder",
    "right_shoulder": "rightShoulder",
    "left_elbow": "leftElbow",
    "right_elbow": "rightElbow",
    "left_wrist": "leftWrist",
    "right_wrist": "rightWrist"
}

_HAND_KEYPOINT_MAPPING = {
    "wrist": "wrist",
    "index_finger_tip": "indexTip",
    "index_finger_dip": "indexDIP",
    "index_finger_pip": "indexPIP",
    "index_finger_mcp": "indexMCP",
    "middle_finger_tip": "middleTip",
    "middle_finger_dip": "middleDIP",
    "middle_finger_pip": "middlePIP",
    "middle_finger_mcp": "middleMCP",
    "ring_finger_tip": "ringTip",
    "ring_finger_dip": "ringDIP",
    "ring_finger_pip": "ringPIP",
    "ring_finger_mcp": "ringMCP",
    "pinky_tip": "littleTip",
    "pinky_dip": "littleDIP",
    "pinky_pip": "littlePIP",
    "pinky_mcp": "littleMCP",
    "thumb_tip": "thumbTip",
    "thumb_ip": "thumbIP",
    "thumb_mcp": "thumbMP",
    "thumb_cmc": "thumbCMC"
}


def map_blazepose_keypoint(column):
    """Translate one BlazePose column name to the project's naming scheme.

    Body columns such as ``left_eye_x`` map to ``leftEye_X``; hand columns such
    as ``left_hand_thumb_tip_x`` map to ``thumbTip_left_X``.  Returns ``None``
    for keypoints that have no mapping (the caller drops those columns).
    """
    # The trailing "_x"/"_y" becomes the "_X"/"_Y" suffix of the new name.
    suffix = column[-2:].upper()
    base = column[:-2]

    for prefix, hand in (("left_hand_", "left"), ("right_hand_", "right")):
        if base.startswith(prefix):
            finger_name = base[len(prefix):]
            mapped = _HAND_KEYPOINT_MAPPING.get(finger_name)
            return None if mapped is None else f"{mapped}_{hand}{suffix}"

    mapped = _BODY_KEYPOINT_MAPPING.get(base)
    return None if mapped is None else mapped + suffix


def map_blazepose_df(df):
    """Rename BlazePose columns, synthesize the neck keypoint, drop the rest.

    The neck is computed frame-by-frame as the midpoint of the two shoulders.
    Columns without a mapping are removed.  Returns the transformed DataFrame.
    """
    renamings = {}
    to_drop = []
    for column in df.columns:
        new_name = map_blazepose_keypoint(column)
        if new_name:
            renamings[column] = new_name
        else:
            to_drop.append(column)
    df = df.rename(columns=renamings)

    for index, row in df.iterrows():
        frame_count = len(row["leftEar_Y"])
        neck_x = []
        neck_y = []
        # Treat each element of the sequence (analyzed frame) individually:
        # neck = midpoint between the shoulders.
        for frame in range(frame_count):
            neck_x.append((float(row["leftShoulder_X"][frame]) + float(row["rightShoulder_X"][frame])) / 2)
            neck_y.append((float(row["leftShoulder_Y"][frame]) + float(row["rightShoulder_Y"][frame])) / 2)
        # Stored as str: DataFrame.loc cannot assign a list into a single cell.
        df.loc[index, "neck_X"] = str(neck_x)
        df.loc[index, "neck_Y"] = str(neck_y)

    df.drop(columns=to_drop, inplace=True)
    return df
def normalize_body_full(df: pd.DataFrame) -> Tuple[pd.DataFrame, list]:
    """
    Normalizes the body position data using the Bohacek-normalization algorithm.

    :param df: pd.DataFrame to be normalized (each cell holds a per-frame list of coordinates)
    :return: (pd.DataFrame with normalized values for body pose,
              list of indexes of rows that could not be normalized — those rows keep their original values)
    """
    import copy

    logger = get_logger(__name__)

    # TODO: Fix division by zero

    output_rows = []
    invalid_row_indexes = []

    # Iterate over all of the records in the dataset
    for index, row in df.iterrows():

        sequence_size = len(row["leftEar_Y"])
        valid_sequence = True
        # Deep copy: the loop below mutates `row`'s lists in place, so a plain
        # alias (`original_row = row`) could not serve as an untouched fallback.
        original_row = copy.deepcopy(row)

        last_starting_point, last_ending_point = None, None

        # Treat each element of the sequence (analyzed frame) individually
        for sequence_index in range(sequence_size):

            # Prevent from even starting the analysis if some necessary elements are not present
            if (row["leftShoulder_X"][sequence_index] == 0 or row["rightShoulder_X"][sequence_index] == 0) and \
                    (row["neck_X"][sequence_index] == 0 or row["nose_X"][sequence_index] == 0):
                if not last_starting_point:
                    valid_sequence = False
                    continue

                else:
                    starting_point, ending_point = last_starting_point, last_ending_point

            else:

                # NOTE: the "head metric" is taken as the plain shoulder distance
                # (pose estimators tend to predict shoulder centers rather than
                # shoulder ends); when a shoulder is missing, fall back to the
                # neck-to-nose distance.  Review this if switching pose libraries.
                if row["leftShoulder_X"][sequence_index] != 0 and row["rightShoulder_X"][sequence_index] != 0:
                    left_shoulder = (row["leftShoulder_X"][sequence_index], row["leftShoulder_Y"][sequence_index])
                    right_shoulder = (row["rightShoulder_X"][sequence_index], row["rightShoulder_Y"][sequence_index])
                    shoulder_distance = ((((left_shoulder[0] - right_shoulder[0]) ** 2) + (
                        (left_shoulder[1] - right_shoulder[1]) ** 2)) ** 0.5)
                    head_metric = shoulder_distance
                else:
                    neck = (row["neck_X"][sequence_index], row["neck_Y"][sequence_index])
                    nose = (row["nose_X"][sequence_index], row["nose_Y"][sequence_index])
                    neck_nose_distance = ((((neck[0] - nose[0]) ** 2) + ((neck[1] - nose[1]) ** 2)) ** 0.5)
                    head_metric = neck_nose_distance

                # Set the starting and ending point of the normalization bounding box
                starting_point = [row["neck_X"][sequence_index] - 3 * head_metric,
                                  row["leftEye_Y"][sequence_index] + (head_metric / 2)]
                ending_point = [row["neck_X"][sequence_index] + 3 * head_metric, starting_point[1] - 6 * head_metric]

                last_starting_point, last_ending_point = starting_point, ending_point

            # Ensure that all of the bounding-box-defining coordinates are not out of the picture
            # (mutated in place so the clamped values also propagate to last_*).
            if starting_point[0] < 0:
                starting_point[0] = 0
            if starting_point[1] < 0:
                starting_point[1] = 0
            if ending_point[0] < 0:
                ending_point[0] = 0
            if ending_point[1] < 0:
                ending_point[1] = 0

            # Normalize individual landmarks and save the results
            for identifier in BODY_IDENTIFIERS:
                key = identifier + "_"

                # Prevent from trying to normalize incorrectly captured points
                if row[key + "X"][sequence_index] == 0:
                    continue

                normalized_x = (row[key + "X"][sequence_index] - starting_point[0]) / (ending_point[0] -
                                                                                       starting_point[0])
                normalized_y = (row[key + "Y"][sequence_index] - ending_point[1]) / (starting_point[1] -
                                                                                     ending_point[1])

                row[key + "X"][sequence_index] = normalized_x
                row[key + "Y"][sequence_index] = normalized_y

        if valid_sequence:
            output_rows.append(row)
        else:
            logger.warning(" BODY LANDMARKS: One video instance could not be normalized.")
            # Thanks to the deep copy above, this really is the pre-normalization row.
            output_rows.append(original_row)
            invalid_row_indexes.append(index)

    # pandas.DataFrame.append was removed in pandas 2.0 -- assemble the result
    # in one pass instead (reset_index reproduces ignore_index=True behaviour).
    if output_rows:
        normalized_df = pd.DataFrame(output_rows).reset_index(drop=True)
    else:
        normalized_df = pd.DataFrame(columns=df.columns)

    logger.info("The normalization of body is finished.")
    # logging uses lazy %-style formatting; the original passed the values as
    # stray positional args, so they were never rendered in the messages.
    logger.info("\t-> Original size: %d", df.shape[0])
    logger.info("\t-> Normalized size: %d", normalized_df.shape[0])
    logger.info("\t-> Problematic videos: %d", len(invalid_row_indexes))

    return normalized_df, invalid_row_indexes
def normalize_single_dict(row: dict):
    """
    Normalizes the skeletal data for a given sequence of frames with signer's body pose data. The normalization
    follows the definition from our paper.

    :param row: Dictionary containing key-value pairs with joint identifiers and corresponding lists (sequences) of
                that particular joints coordinates
    :return: Dictionary with normalized skeletal data (following the same schema as input data); when the sequence
             could not be normalized, the untouched original data is returned instead
    """
    import copy

    sequence_size = len(row["leftEar"])
    valid_sequence = True
    # Deep copy: the loop below mutates `row`'s per-frame lists in place, so a
    # plain alias (`original_row = row`) could not serve as an untouched fallback.
    original_row = copy.deepcopy(row)
    logger = get_logger(__name__)

    last_starting_point, last_ending_point = None, None

    # Treat each element of the sequence (analyzed frame) individually
    for sequence_index in range(sequence_size):
        left_shoulder = (row["leftShoulder"][sequence_index][0], row["leftShoulder"][sequence_index][1])
        right_shoulder = (row["rightShoulder"][sequence_index][0], row["rightShoulder"][sequence_index][1])
        neck = (row["neck"][sequence_index][0], row["neck"][sequence_index][1])
        nose = (row["nose"][sequence_index][0], row["nose"][sequence_index][1])
        # Prevent from even starting the analysis if some necessary elements are not present
        if (left_shoulder[0] == 0 or right_shoulder[0] == 0
                or (left_shoulder[0] == right_shoulder[0] and left_shoulder[1] == right_shoulder[1])) and (
                neck[0] == 0 or nose[0] == 0 or (neck[0] == nose[0] and neck[1] == nose[1])):
            if not last_starting_point:
                valid_sequence = False
                continue

            else:
                starting_point, ending_point = last_starting_point, last_ending_point

        else:

            # NOTE: the "head metric" is the plain shoulder distance (pose
            # estimators tend to predict shoulder centers rather than shoulder
            # ends); when the shoulders are missing or coincide, fall back to
            # the neck-to-nose distance.
            if left_shoulder[0] != 0 and right_shoulder[0] != 0 and \
                    (left_shoulder[0] != right_shoulder[0] or left_shoulder[1] != right_shoulder[1]):
                shoulder_distance = ((((left_shoulder[0] - right_shoulder[0]) ** 2) + (
                    (left_shoulder[1] - right_shoulder[1]) ** 2)) ** 0.5)
                head_metric = shoulder_distance
            else:
                neck_nose_distance = ((((neck[0] - nose[0]) ** 2) + ((neck[1] - nose[1]) ** 2)) ** 0.5)
                head_metric = neck_nose_distance

            # Set the starting and ending point of the normalization bounding box
            # (note: uses a full head_metric above the left eye here, unlike the
            # half-metric used in normalize_body_full).
            starting_point = [row["neck"][sequence_index][0] - 3 * head_metric,
                              row["leftEye"][sequence_index][1] + head_metric]
            ending_point = [row["neck"][sequence_index][0] + 3 * head_metric, starting_point[1] - 6 * head_metric]

            last_starting_point, last_ending_point = starting_point, ending_point

        # Ensure that all of the bounding-box-defining coordinates are not out of the picture
        if starting_point[0] < 0:
            starting_point[0] = 0
        if starting_point[1] < 0:
            starting_point[1] = 0
        if ending_point[0] < 0:
            ending_point[0] = 0
        if ending_point[1] < 0:
            ending_point[1] = 0

        # Normalize individual landmarks and save the results
        for identifier in BODY_IDENTIFIERS:
            key = identifier

            # Prevent from trying to normalize incorrectly captured points
            if row[key][sequence_index][0] == 0:
                continue

            if (ending_point[0] - starting_point[0]) == 0 or (starting_point[1] - ending_point[1]) == 0:
                logger.warning("Problematic normalization")
                valid_sequence = False
                break

            normalized_x = (row[key][sequence_index][0] - starting_point[0]) / (ending_point[0] - starting_point[0])
            normalized_y = (row[key][sequence_index][1] - ending_point[1]) / (starting_point[1] - ending_point[1])

            row[key][sequence_index] = list(row[key][sequence_index])

            row[key][sequence_index][0] = normalized_x
            row[key][sequence_index][1] = normalized_y

    if valid_sequence:
        return row

    # Thanks to the deep copy above, this really is the pre-normalization data
    # (the original aliased `row`, so the "fallback" returned mutated values).
    return original_row
HAND_IDENTIFIERS = [
    "wrist",
    "indexTip",
    "indexDIP",
    "indexPIP",
    "indexMCP",
    "middleTip",
    "middleDIP",
    "middlePIP",
    "middleMCP",
    "ringTip",
    "ringDIP",
    "ringPIP",
    "ringMCP",
    "littleTip",
    "littleDIP",
    "littlePIP",
    "littleMCP",
    "thumbTip",
    "thumbIP",
    "thumbMP",
    "thumbCMC"
]


def normalize_hands_full(df: pd.DataFrame) -> pd.DataFrame:
    """
    Normalizes the hands position data using the Bohacek-normalization algorithm.

    :param df: pd.DataFrame to be normalized (each cell holds a per-frame list of coordinates)
    :return: pd.DataFrame with normalized values for hand pose
    """
    logger = get_logger(__name__)
    # TODO: Fix division by zero
    # Hands are addressed by index from here on: left -> 0, right -> 1.
    df.columns = [item.replace("_left_", "_0_").replace("_right_", "_1_") for item in list(df.columns)]

    hand_landmarks = {"X": {0: [], 1: []}, "Y": {0: [], 1: []}}

    # Determine how many hands are present in the dataset
    range_hand_size = 2 if "wrist_1_X" in df.columns else 1

    # Construct the relevant identifiers
    for identifier in HAND_IDENTIFIERS:
        for hand_index in range(range_hand_size):
            hand_landmarks["X"][hand_index].append(identifier + "_" + str(hand_index) + "_X")
            hand_landmarks["Y"][hand_index].append(identifier + "_" + str(hand_index) + "_Y")

    output_rows = []

    # Iterate over all of the records in the dataset
    for index, row in df.iterrows():
        # Treat each hand individually
        for hand_index in range(range_hand_size):

            sequence_size = len(row["wrist_" + str(hand_index) + "_X"])

            # Treat each element of the sequence (analyzed frame) individually
            for sequence_index in range(sequence_size):

                # Retrieve all of the X and Y values of the current frame
                landmarks_x_values = [row[key][sequence_index]
                                      for key in hand_landmarks["X"][hand_index] if row[key][sequence_index] != 0]
                landmarks_y_values = [row[key][sequence_index]
                                      for key in hand_landmarks["Y"][hand_index] if row[key][sequence_index] != 0]

                # Prevent from even starting the analysis if some necessary elements are not present
                if not landmarks_x_values or not landmarks_y_values:
                    logger.warning(
                        " HAND LANDMARKS: One frame could not be normalized as there is no data present. Record: " +
                        str(index) +
                        ", Frame: " + str(sequence_index))
                    continue

                # Calculate the deltas: pad the tight bounding box by 10% of its
                # longer side and extend the shorter side to make it square.
                width, height = max(landmarks_x_values) - min(landmarks_x_values), max(landmarks_y_values) - min(
                    landmarks_y_values)
                if width > height:
                    delta_x = 0.1 * width
                    delta_y = delta_x + ((width - height) / 2)
                else:
                    delta_y = 0.1 * height
                    delta_x = delta_y + ((height - width) / 2)

                # Set the starting and ending point of the normalization bounding box
                starting_point = (min(landmarks_x_values) - delta_x, min(landmarks_y_values) - delta_y)
                ending_point = (max(landmarks_x_values) + delta_x, max(landmarks_y_values) + delta_y)

                # Normalize individual landmarks and save the results
                for identifier in HAND_IDENTIFIERS:
                    key = identifier + "_" + str(hand_index) + "_"

                    # Prevent from trying to normalize incorrectly captured points
                    if row[key + "X"][sequence_index] == 0 or (ending_point[0] - starting_point[0]) == 0 or \
                            (starting_point[1] - ending_point[1]) == 0:
                        continue

                    normalized_x = (row[key + "X"][sequence_index] - starting_point[0]) / (ending_point[0] -
                                                                                           starting_point[0])
                    normalized_y = (row[key + "Y"][sequence_index] - ending_point[1]) / (starting_point[1] -
                                                                                        ending_point[1])

                    row[key + "X"][sequence_index] = normalized_x
                    row[key + "Y"][sequence_index] = normalized_y

        output_rows.append(row)

    # pandas.DataFrame.append was removed in pandas 2.0 -- build the result
    # frame in one pass (reset_index reproduces ignore_index=True behaviour).
    if output_rows:
        return pd.DataFrame(output_rows).reset_index(drop=True)
    return pd.DataFrame(columns=df.columns)


def normalize_single_dict(row: dict):
    """
    Normalizes the skeletal data for a given sequence of frames with signer's hand pose data. The normalization
    follows the definition from our paper.

    :param row: Dictionary containing key-value pairs with joint identifiers and corresponding lists (sequences) of
                that particular joints coordinates
    :return: Dictionary with normalized skeletal data (following the same schema as input data)
    """
    hand_landmarks = {0: [], 1: []}

    # Determine how many hands are present in the dataset
    range_hand_size = 2 if "wrist_1" in row.keys() else 1

    # Construct the relevant identifiers
    for identifier in HAND_IDENTIFIERS:
        for hand_index in range(range_hand_size):
            hand_landmarks[hand_index].append(identifier + "_" + str(hand_index))

    # Treat each hand individually
    for hand_index in range(range_hand_size):

        sequence_size = len(row["wrist_" + str(hand_index)])

        # Treat each element of the sequence (analyzed frame) individually
        for sequence_index in range(sequence_size):

            # Retrieve all of the X and Y values of the current frame
            landmarks_x_values = [row[key][sequence_index][0] for key in hand_landmarks[hand_index] if
                                  row[key][sequence_index][0] != 0]
            landmarks_y_values = [row[key][sequence_index][1] for key in hand_landmarks[hand_index] if
                                  row[key][sequence_index][1] != 0]

            # Prevent from even starting the analysis if some necessary elements are not present
            if not landmarks_x_values or not landmarks_y_values:
                continue

            # Calculate the deltas (pad the tight box by 10% of its longer side
            # and extend the shorter side to make it square)
            width, height = max(landmarks_x_values) - min(landmarks_x_values), max(landmarks_y_values) - min(
                landmarks_y_values)
            if width > height:
                delta_x = 0.1 * width
                delta_y = delta_x + ((width - height) / 2)
            else:
                delta_y = 0.1 * height
                delta_x = delta_y + ((height - width) / 2)

            # Set the starting and ending point of the normalization bounding box
            starting_point = (min(landmarks_x_values) - delta_x, min(landmarks_y_values) - delta_y)
            ending_point = (max(landmarks_x_values) + delta_x, max(landmarks_y_values) + delta_y)

            # Normalize individual landmarks and save the results
            for identifier in HAND_IDENTIFIERS:
                key = identifier + "_" + str(hand_index)

                # Prevent from trying to normalize incorrectly captured points
                if row[key][sequence_index][0] == 0 or (ending_point[0] - starting_point[0]) == 0 or (
                        starting_point[1] - ending_point[1]) == 0:
                    continue

                normalized_x = (row[key][sequence_index][0] - starting_point[0]) / (ending_point[0] -
                                                                                    starting_point[0])
                # BUGFIX: match the training-time normalization in
                # normalize_hands_full.  The original used
                # (y - start) / (end - start), which flips the Y axis relative
                # to normalize_hands_full and fed the model mismatched hand
                # coordinates at inference time.
                normalized_y = (row[key][sequence_index][1] - ending_point[1]) / (starting_point[1] -
                                                                                 ending_point[1])

                row[key][sequence_index] = list(row[key][sequence_index])

                row[key][sequence_index][0] = normalized_x
                row[key][sequence_index][1] = normalized_y

    return row


if __name__ == "__main__":
    pass
enumerate(video_fps) if i not in invalid_row_indexes] + +# Return the metadata back to the dataset +df["labels"] = labels +df["video_fps"] = video_fps + +df.to_csv(os.path.join(DATASET_PATH, "WLASL_test_15fps_normalized.csv"), encoding="utf-8", index=False) diff --git a/notebooks/embeddings_evaluation.ipynb b/notebooks/embeddings_evaluation.ipynb new file mode 100644 index 0000000..1db8999 --- /dev/null +++ b/notebooks/embeddings_evaluation.ipynb @@ -0,0 +1,411 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "c20f7fd5", + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ada032d0", + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "import os\n", + "import os.path as op\n", + "import pandas as pd\n", + "import json\n", + "import base64" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "05682e73", + "metadata": {}, + "outputs": [], + "source": [ + "sys.path.append(op.abspath('..'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fede7684", + "metadata": {}, + "outputs": [], + "source": [ + "os.environ[\"CUBLAS_WORKSPACE_CONFIG\"] = \":16:8\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ce531994", + "metadata": {}, + "outputs": [], + "source": [ + "from collections import Counter\n", + "from itertools import chain\n", + "\n", + "import torch\n", + "import multiprocessing\n", + "from scipy.spatial import distance_matrix\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f4a2d672", + "metadata": {}, + "outputs": [], + "source": [ + "from torch.utils.data import DataLoader\n", + "\n", + "from datasets import SLREmbeddingDataset, collate_fn_padd\n", + "from datasets.dataset_loader import LocalDatasetLoader\n", + "from models import embeddings_scatter_plot_splits\n", + "from models import 
SPOTER_EMBEDDINGS" + ] + }, + { + "cell_type": "markdown", + "id": "af8fbe32", + "metadata": {}, + "source": [ + "## Model and dataset loading" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1d9db764", + "metadata": {}, + "outputs": [], + "source": [ + "import random\n", + "seed = 43\n", + "random.seed(seed)\n", + "np.random.seed(seed)\n", + "os.environ[\"PYTHONHASHSEED\"] = str(seed)\n", + "torch.manual_seed(seed)\n", + "torch.cuda.manual_seed(seed)\n", + "torch.cuda.manual_seed_all(seed)\n", + "torch.backends.cudnn.deterministic = True\n", + "torch.use_deterministic_algorithms(True) \n", + "generator = torch.Generator()\n", + "generator.manual_seed(seed)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "71224139", + "metadata": {}, + "outputs": [], + "source": [ + "BASE_DATA_FOLDER = '../data/'\n", + "os.environ[\"BASE_DATA_FOLDER\"] = BASE_DATA_FOLDER\n", + "device = torch.device(\"cpu\")\n", + "if torch.cuda.is_available():\n", + " device = torch.device(\"cuda\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "013d3774", + "metadata": {}, + "outputs": [], + "source": [ + "# LOAD MODEL FROM CLEARML\n", + "# from clearml import InputModel\n", + "# model = InputModel(model_id='1b736da469b04e91b8451d2342aef6ce')\n", + "# checkpoint = torch.load(model.get_weights())\n", + "\n", + "## Set your path to checkoint here\n", + "CHECKPOINT_PATH = \"../checkpoints/checkpoint_embed_992.pth\"\n", + "checkpoint = torch.load(CHECKPOINT_PATH, map_location=device)\n", + "\n", + "model = SPOTER_EMBEDDINGS(\n", + " features=checkpoint[\"config_args\"].vector_length,\n", + " hidden_dim=checkpoint[\"config_args\"].hidden_dim,\n", + " norm_emb=checkpoint[\"config_args\"].normalize_embeddings,\n", + ").to(device)\n", + "\n", + "model.load_state_dict(checkpoint[\"state_dict\"])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba6b58f0", + "metadata": {}, + "outputs": [], + "source": [ + 
"SL_DATASET = 'wlasl' # or 'lsa'\n", + "if SL_DATASET == 'wlasl':\n", + " dataset_name = \"wlasl_mapped_mediapipe_only_landmarks_25fps\"\n", + " num_classes = 100\n", + " split_dataset_path = \"WLASL100_{}_25fps.csv\"\n", + "else:\n", + " dataset_name = \"lsa64_mapped_mediapipe_only_landmarks_25fps\"\n", + " num_classes = 64\n", + " split_dataset_path = \"LSA64_{}.csv\"\n", + " \n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5643a72c", + "metadata": {}, + "outputs": [], + "source": [ + "def get_dataset_loader(loader_name=None):\n", + " if loader_name == 'CLEARML':\n", + " from datasets.clearml_dataset_loader import ClearMLDatasetLoader\n", + " return ClearMLDatasetLoader()\n", + " else:\n", + " return LocalDatasetLoader()\n", + "\n", + "dataset_loader = get_dataset_loader()\n", + "dataset_project = \"Sign Language Recognition\"\n", + "batch_size = 1\n", + "dataset_folder = dataset_loader.get_dataset_folder(dataset_project, dataset_name)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "04a62088", + "metadata": {}, + "outputs": [], + "source": [ + "def seed_worker(worker_id):\n", + " worker_seed = torch.initial_seed() % 2**32\n", + " np.random.seed(worker_seed)\n", + " random.seed(worker_seed)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "79c837c1", + "metadata": {}, + "outputs": [], + "source": [ + "dataloaders = {}\n", + "splits = ['train', 'val']\n", + "dfs = {}\n", + "for split in splits:\n", + " split_set_path = op.join(dataset_folder, split_dataset_path.format(split))\n", + " split_set = SLREmbeddingDataset(split_set_path, triplet=False, augmentations=False)\n", + " data_loader = DataLoader(\n", + " split_set,\n", + " batch_size=batch_size,\n", + " shuffle=False,\n", + " collate_fn=collate_fn_padd,\n", + " pin_memory=torch.cuda.is_available(),\n", + " num_workers=multiprocessing.cpu_count(),\n", + " worker_init_fn=seed_worker,\n", + " generator=generator,\n", + " )\n", + " 
dataloaders[split] = data_loader\n", + " dfs[split] = pd.read_csv(split_set_path)\n", + "\n", + "with open(op.join(dataset_folder, 'id_to_label.json')) as fid:\n", + " id_to_label = json.load(fid)\n", + "id_to_label = {int(key): value for key, value in id_to_label.items()}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8b5bda73", + "metadata": {}, + "outputs": [], + "source": [ + "labels_split = {}\n", + "embeddings_split = {}\n", + "splits = list(dataloaders.keys())\n", + "with torch.no_grad():\n", + " for split, dataloader in dataloaders.items():\n", + " labels_str = []\n", + " embeddings = []\n", + " k = 0\n", + " for i, (inputs, labels, masks) in enumerate(dataloader):\n", + " k += 1\n", + " inputs = inputs.to(device)\n", + " masks = masks.to(device)\n", + " outputs = model(inputs, masks)\n", + " for n in range(outputs.shape[0]):\n", + " embeddings.append(outputs[n, 0].cpu().detach().numpy())\n", + " embeddings_split[split] = embeddings" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0efa0871", + "metadata": {}, + "outputs": [], + "source": [ + "len(embeddings_split['train']), len(dfs['train'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ab83c6e2", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "for split in splits:\n", + " df = dfs[split]\n", + " df['embeddings'] = embeddings_split[split]" + ] + }, + { + "cell_type": "markdown", + "id": "2951638d", + "metadata": {}, + "source": [ + "## Compute metrics\n", + "Here computing top1 and top5 metrics either by using only a class centroid or by using the whole dataset to classify vectors.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7399b8ae", + "metadata": {}, + "outputs": [], + "source": [ + "for use_centroids, str_use_centroids in zip([True, False],\n", + " ['Using centroids only', 'Using all embeddings']):\n", + "\n", + " df_val = dfs['val']\n", + " df_train = 
dfs['train']\n", + " if use_centroids:\n", + " df_train = dfs['train'].groupby('labels')['embeddings'].apply(np.mean).reset_index()\n", + " x_train = np.vstack(df_train['embeddings'])\n", + " x_val = np.vstack(df_val['embeddings'])\n", + "\n", + " d_mat = distance_matrix(x_val, x_train, p=2)\n", + "\n", + " top5_embs = 0\n", + " top5_classes = 0\n", + " knn = 0\n", + " top1 = 0\n", + "\n", + " len_val_dataset = len(df_val)\n", + " good_samples = []\n", + "\n", + " for i in range(d_mat.shape[0]):\n", + " true_label = df_val.loc[i, 'labels']\n", + " labels = df_train['labels'].values\n", + " argsort = np.argsort(d_mat[i])\n", + " sorted_labels = labels[argsort]\n", + " if sorted_labels[0] == true_label:\n", + " top1 += 1\n", + " if use_centroids:\n", + " good_samples.append(df_val.loc[i, 'video_id'])\n", + " else:\n", + " good_samples.append((df_val.loc[i, 'video_id'],\n", + " df_train.loc[argsort[0], 'video_id'],\n", + " i,\n", + " argsort[0]))\n", + "\n", + "\n", + " if true_label == Counter(sorted_labels[:5]).most_common()[0][0]:\n", + " knn += 1\n", + " if true_label in sorted_labels[:5]:\n", + " top5_embs += 1\n", + " if true_label in list(dict.fromkeys(sorted_labels))[:5]:\n", + " top5_classes += 1\n", + " else:\n", + " continue\n", + "\n", + "\n", + " print(str_use_centroids)\n", + "\n", + "\n", + " print(f'Top-1 accuracy: {100 * top1 / len_val_dataset : 0.2f} %')\n", + " if not use_centroids:\n", + " print(f'5-nn accuracy: {100 * knn / len_val_dataset : 0.2f} % (Picks the class that appears most often in the 5 closest embeddings)')\n", + " print(f'Top-5 embeddings class match: {100 * top5_embs / len_val_dataset: 0.2f} % (Picks any class in the 5 closest embeddings)')\n", + " if not use_centroids:\n", + " print(f'Top-5 unique class match: {100 * top5_classes / len_val_dataset: 0.2f} % (Picks the 5 closest distinct classes)')\n", + " print('\\n' + '#'*32 + '\\n')" + ] + }, + { + "cell_type": "markdown", + "id": "d2aaac6c", + "metadata": {}, + "source": [ + "## 
Show some examples (only for WLASL)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b9d1d309", + "metadata": {}, + "outputs": [], + "source": [ + "from IPython.display import Video" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fd2a0cd8", + "metadata": {}, + "outputs": [], + "source": [ + "for row in df_train[df_train.label_name == 'thursday'][:3].itertuples():\n", + " display(Video(op.join(BASE_DATA_FOLDER, f'wlasl/videos/{row.video_id}.mp4'), embed=True))" + ] + } + ], + "metadata": { + "jupytext": { + "cell_metadata_filter": "-all", + "main_language": "python", + "notebook_metadata_filter": "-all" + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.13" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/visualize_embeddings.ipynb b/notebooks/visualize_embeddings.ipynb new file mode 100644 index 0000000..c49a9fa --- /dev/null +++ b/notebooks/visualize_embeddings.ipynb @@ -0,0 +1,491 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "8ef5cd92", + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "78c4643a", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import sys\n", + "import os.path as op\n", + "import pandas as pd\n", + "import json\n", + "import base64" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "ffba4333", + "metadata": {}, + "outputs": [], + "source": [ + "sys.path.append(op.abspath('..'))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "5bc81f71", + "metadata": {}, + "outputs": 
[], + "source": [ + "os.environ[\"CUBLAS_WORKSPACE_CONFIG\"] = \":16:8\"" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "3de8bcf2", + "metadata": { + "lines_to_next_cell": 0 + }, + "outputs": [], + "source": [ + "import torch\n", + "import multiprocessing\n", + "from itertools import chain\n", + "import numpy as np\n", + "import random" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "91a045ba", + "metadata": {}, + "outputs": [], + "source": [ + "from bokeh.io import output_notebook, output_file\n", + "from bokeh.plotting import figure, show\n", + "from bokeh.models import LinearColorMapper, ColumnDataSource\n", + "from bokeh.transform import factor_cmap, factor_mark\n", + "from torch.utils.data import DataLoader\n", + "\n", + "\n", + "from datasets import SLREmbeddingDataset, collate_fn_padd\n", + "from datasets.dataset_loader import LocalDatasetLoader\n", + "from models import embeddings_scatter_plot_splits\n", + "from models import SPOTER_EMBEDDINGS" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "bc50c296", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "seed = 43\n", + "random.seed(seed)\n", + "np.random.seed(seed)\n", + "os.environ[\"PYTHONHASHSEED\"] = str(seed)\n", + "torch.manual_seed(seed)\n", + "torch.cuda.manual_seed(seed)\n", + "torch.cuda.manual_seed_all(seed)\n", + "torch.backends.cudnn.deterministic = True\n", + "torch.use_deterministic_algorithms(True) \n", + "generator = torch.Generator()\n", + "generator.manual_seed(seed)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "82766a17", + "metadata": {}, + "outputs": [], + "source": [ + "BASE_DATA_FOLDER = '../data/'\n", + "os.environ[\"BASE_DATA_FOLDER\"] = BASE_DATA_FOLDER\n", + "device = torch.device(\"cpu\")\n", + "if torch.cuda.is_available():\n", + " device = 
torch.device(\"cuda\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "ead15a36", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# LOAD MODEL FROM CLEARML\n", + "# from clearml import InputModel\n", + "# model = InputModel(model_id='1b736da469b04e91b8451d2342aef6ce')\n", + "# checkpoint = torch.load(model.get_weights())\n", + "\n", + "\n", + "CHECKPOINT_PATH = \"../checkpoints/checkpoint_embed_992.pth\"\n", + "checkpoint = torch.load(CHECKPOINT_PATH, map_location=device)\n", + "\n", + "\n", + "model = SPOTER_EMBEDDINGS(\n", + " features=checkpoint[\"config_args\"].vector_length,\n", + " hidden_dim=checkpoint[\"config_args\"].hidden_dim,\n", + " norm_emb=checkpoint[\"config_args\"].normalize_embeddings,\n", + ").to(device)\n", + "\n", + "model.load_state_dict(checkpoint[\"state_dict\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "20f8036d", + "metadata": {}, + "outputs": [], + "source": [ + "SL_DATASET = 'wlasl' # or 'lsa'\n", + "if SL_DATASET == 'wlasl':\n", + " dataset_name = \"wlasl_mapped_mediapipe_only_landmarks_25fps\"\n", + " num_classes = 100\n", + " split_dataset_path = \"WLASL100_{}_25fps.csv\"\n", + "else:\n", + " dataset_name = \"lsa64_mapped_mediapipe_only_landmarks_25fps\"\n", + " num_classes = 64\n", + " split_dataset_path = \"LSA64_{}.csv\"\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "758716b6", + "metadata": {}, + "outputs": [], + "source": [ + "def get_dataset_loader(loader_name=None):\n", + " if loader_name == 'CLEARML':\n", + " from datasets.clearml_dataset_loader import ClearMLDatasetLoader\n", + " return ClearMLDatasetLoader()\n", + " else:\n", + " return LocalDatasetLoader()\n", + "\n", + "dataset_loader = get_dataset_loader()\n", + "dataset_project = \"Sign Language Recognition\"\n", + "batch_size = 1\n", + "dataset_folder = 
dataset_loader.get_dataset_folder(dataset_project, dataset_name)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "f1527959", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/opt/conda/lib/python3.7/site-packages/sklearn/manifold/_t_sne.py:783: FutureWarning: The default initialization in TSNE will change from 'random' to 'pca' in 1.2.\n", + " FutureWarning,\n", + "/opt/conda/lib/python3.7/site-packages/sklearn/manifold/_t_sne.py:793: FutureWarning: The default learning rate in TSNE will change from 200.0 to 'auto' in 1.2.\n", + " FutureWarning,\n" + ] + } + ], + "source": [ + "dataloaders = {}\n", + "splits = ['train', 'val']\n", + "dfs = {}\n", + "for split in splits:\n", + " split_set_path = op.join(dataset_folder, split_dataset_path.format(split))\n", + " split_set = SLREmbeddingDataset(split_set_path, triplet=False)\n", + " data_loader = DataLoader(\n", + " split_set,\n", + " batch_size=batch_size,\n", + " shuffle=False,\n", + " collate_fn=collate_fn_padd,\n", + " pin_memory=torch.cuda.is_available(),\n", + " num_workers=multiprocessing.cpu_count()\n", + " )\n", + " dataloaders[split] = data_loader\n", + " dfs[split] = pd.read_csv(split_set_path)\n", + "\n", + "with open(op.join(dataset_folder, 'id_to_label.json')) as fid:\n", + " id_to_label = json.load(fid)\n", + "id_to_label = {int(key): value for key, value in id_to_label.items()}\n", + "\n", + "tsne_results, labels_results = embeddings_scatter_plot_splits(model,\n", + " dataloaders,\n", + " device,\n", + " id_to_label,\n", + " perplexity=40,\n", + " n_iter=1000)\n", + "\n", + "\n", + "set_labels = list(set(next(chain(labels_results.values()))))" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "3c3af5bf", + "metadata": { + "lines_to_next_cell": 0 + }, + "outputs": [ + { + "data": { + "text/plain": [ + "1533" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ 
+ "dfs = {}\n", + "for split in splits:\n", + " split_set_path = op.join(dataset_folder, split_dataset_path.format(split))\n", + " df = pd.read_csv(split_set_path)\n", + " df['tsne_x'] = tsne_results[split][:, 0]\n", + " df['tsne_y'] = tsne_results[split][:, 1]\n", + " df['split'] = split\n", + " if SL_DATASET == 'wlasl':\n", + " df['video_fn'] = df['video_id'].apply(lambda video_id: os.path.join(BASE_DATA_FOLDER, f'wlasl/videos/{video_id:05d}.mp4'))\n", + " else:\n", + " df['video_fn'] = df['video_id'].apply(lambda video_id: os.path.join(BASE_DATA_FOLDER, f'lsa/videos/{video_id}.mp4'))\n", + " dfs[split] = df\n", + "\n", + "df = pd.concat([dfs['train'].sample(100), dfs['val']]).reset_index(drop=True)\n", + "len(df)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "dccbe1b9", + "metadata": {}, + "outputs": [], + "source": [ + "from tqdm.auto import tqdm\n", + "\n", + "def load_videos(video_list):\n", + " print('loading videos')\n", + " videos = []\n", + " for video_fn in tqdm(video_list):\n", + " if video_fn is None:\n", + " video_data = None\n", + " else:\n", + " with open(video_fn, 'rb') as fid:\n", + " video_data = base64.b64encode(fid.read()).decode()\n", + " videos.append(video_data)\n", + " print('Done loading videos')\n", + " return videos" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "904298f0", + "metadata": {}, + "outputs": [], + "source": [ + "use_img_div = False\n", + "if use_img_div:\n", + " # sample dataframe data to avoid overloading scatter plot with too many videos\n", + " df = df.loc[(df['tsne_x'] > 10) & (df['tsne_x'] < 20)]\n", + " df = df.loc[(df['tsne_y'] > 10) & (df['tsne_y'] < 20)]" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "42832f7c", + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "

\n", + " \n", + " Loading BokehJS ...\n", + "
\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/javascript": "(function(root) {\n function now() {\n return new Date();\n }\n\n const force = true;\n\n if (typeof root._bokeh_onload_callbacks === \"undefined\" || force === true) {\n root._bokeh_onload_callbacks = [];\n root._bokeh_is_loading = undefined;\n }\n\nconst JS_MIME_TYPE = 'application/javascript';\n const HTML_MIME_TYPE = 'text/html';\n const EXEC_MIME_TYPE = 'application/vnd.bokehjs_exec.v0+json';\n const CLASS_NAME = 'output_bokeh rendered_html';\n\n /**\n * Render data to the DOM node\n */\n function render(props, node) {\n const script = document.createElement(\"script\");\n node.appendChild(script);\n }\n\n /**\n * Handle when an output is cleared or removed\n */\n function handleClearOutput(event, handle) {\n const cell = handle.cell;\n\n const id = cell.output_area._bokeh_element_id;\n const server_id = cell.output_area._bokeh_server_id;\n // Clean up Bokeh references\n if (id != null && id in Bokeh.index) {\n Bokeh.index[id].model.document.clear();\n delete Bokeh.index[id];\n }\n\n if (server_id !== undefined) {\n // Clean up Bokeh references\n const cmd_clean = \"from bokeh.io.state import curstate; print(curstate().uuid_to_server['\" + server_id + \"'].get_sessions()[0].document.roots[0]._id)\";\n cell.notebook.kernel.execute(cmd_clean, {\n iopub: {\n output: function(msg) {\n const id = msg.content.text.trim();\n if (id in Bokeh.index) {\n Bokeh.index[id].model.document.clear();\n delete Bokeh.index[id];\n }\n }\n }\n });\n // Destroy server and session\n const cmd_destroy = \"import bokeh.io.notebook as ion; ion.destroy_server('\" + server_id + \"')\";\n cell.notebook.kernel.execute(cmd_destroy);\n }\n }\n\n /**\n * Handle when a new output is added\n */\n function handleAddOutput(event, handle) {\n const output_area = handle.output_area;\n const output = handle.output;\n\n // limit handleAddOutput to display_data with EXEC_MIME_TYPE content 
only\n if ((output.output_type != \"display_data\") || (!Object.prototype.hasOwnProperty.call(output.data, EXEC_MIME_TYPE))) {\n return\n }\n\n const toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n\n if (output.metadata[EXEC_MIME_TYPE][\"id\"] !== undefined) {\n toinsert[toinsert.length - 1].firstChild.textContent = output.data[JS_MIME_TYPE];\n // store reference to embed id on output_area\n output_area._bokeh_element_id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n }\n if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n const bk_div = document.createElement(\"div\");\n bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n const script_attrs = bk_div.children[0].attributes;\n for (let i = 0; i < script_attrs.length; i++) {\n toinsert[toinsert.length - 1].firstChild.setAttribute(script_attrs[i].name, script_attrs[i].value);\n toinsert[toinsert.length - 1].firstChild.textContent = bk_div.children[0].textContent\n }\n // store reference to server id on output_area\n output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n }\n }\n\n function register_renderer(events, OutputArea) {\n\n function append_mime(data, metadata, element) {\n // create a DOM node to render to\n const toinsert = this.create_output_subarea(\n metadata,\n CLASS_NAME,\n EXEC_MIME_TYPE\n );\n this.keyboard_manager.register_events(toinsert);\n // Render to node\n const props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n render(props, toinsert[toinsert.length - 1]);\n element.append(toinsert);\n return toinsert\n }\n\n /* Handle when an output is cleared or removed */\n events.on('clear_output.CodeCell', handleClearOutput);\n events.on('delete.Cell', handleClearOutput);\n\n /* Handle when a new output is added */\n events.on('output_added.OutputArea', handleAddOutput);\n\n /**\n * Register the mime type and append_mime function with output_area\n */\n OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n /* Is 
output safe? */\n safe: true,\n /* Index of renderer in `output_area.display_order` */\n index: 0\n });\n }\n\n // register the mime type if in Jupyter Notebook environment and previously unregistered\n if (root.Jupyter !== undefined) {\n const events = require('base/js/events');\n const OutputArea = require('notebook/js/outputarea').OutputArea;\n\n if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n register_renderer(events, OutputArea);\n }\n }\n if (typeof (root._bokeh_timeout) === \"undefined\" || force === true) {\n root._bokeh_timeout = Date.now() + 5000;\n root._bokeh_failed_load = false;\n }\n\n const NB_LOAD_WARNING = {'data': {'text/html':\n \"
\\n\"+\n \"

\\n\"+\n \"BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this \\n\"+\n \"may be due to a slow or bad network connection. Possible fixes:\\n\"+\n \"

\\n\"+\n \"
    \\n\"+\n \"
  • re-rerun `output_notebook()` to attempt to load from CDN again, or
  • \\n\"+\n \"
  • use INLINE resources instead, as so:
  • \\n\"+\n \"
\\n\"+\n \"\\n\"+\n \"from bokeh.resources import INLINE\\n\"+\n \"output_notebook(resources=INLINE)\\n\"+\n \"\\n\"+\n \"
\"}};\n\n function display_loaded() {\n const el = document.getElementById(\"1107\");\n if (el != null) {\n el.textContent = \"BokehJS is loading...\";\n }\n if (root.Bokeh !== undefined) {\n if (el != null) {\n el.textContent = \"BokehJS \" + root.Bokeh.version + \" successfully loaded.\";\n }\n } else if (Date.now() < root._bokeh_timeout) {\n setTimeout(display_loaded, 100)\n }\n }\n\n function run_callbacks() {\n try {\n root._bokeh_onload_callbacks.forEach(function(callback) {\n if (callback != null)\n callback();\n });\n } finally {\n delete root._bokeh_onload_callbacks\n }\n console.debug(\"Bokeh: all callbacks have finished\");\n }\n\n function load_libs(css_urls, js_urls, callback) {\n if (css_urls == null) css_urls = [];\n if (js_urls == null) js_urls = [];\n\n root._bokeh_onload_callbacks.push(callback);\n if (root._bokeh_is_loading > 0) {\n console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n return null;\n }\n if (js_urls == null || js_urls.length === 0) {\n run_callbacks();\n return null;\n }\n console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n root._bokeh_is_loading = css_urls.length + js_urls.length;\n\n function on_load() {\n root._bokeh_is_loading--;\n if (root._bokeh_is_loading === 0) {\n console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n run_callbacks()\n }\n }\n\n function on_error(url) {\n console.error(\"failed to load \" + url);\n }\n\n for (let i = 0; i < css_urls.length; i++) {\n const url = css_urls[i];\n const element = document.createElement(\"link\");\n element.onload = on_load;\n element.onerror = on_error.bind(null, url);\n element.rel = \"stylesheet\";\n element.type = \"text/css\";\n element.href = url;\n console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n document.body.appendChild(element);\n }\n\n for (let i = 0; i < js_urls.length; i++) {\n const url = js_urls[i];\n const element = document.createElement('script');\n 
element.onload = on_load;\n element.onerror = on_error.bind(null, url);\n element.async = false;\n element.src = url;\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n };\n\n function inject_raw_css(css) {\n const element = document.createElement(\"style\");\n element.appendChild(document.createTextNode(css));\n document.body.appendChild(element);\n }\n\n const js_urls = [\"https://cdn.bokeh.org/bokeh/release/bokeh-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-tables-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-mathjax-2.4.3.min.js\"];\n const css_urls = [];\n\n const inline_js = [ function(Bokeh) {\n Bokeh.set_log_level(\"info\");\n },\nfunction(Bokeh) {\n }\n ];\n\n function run_inline_js() {\n if (root.Bokeh !== undefined || force === true) {\n for (let i = 0; i < inline_js.length; i++) {\n inline_js[i].call(root, root.Bokeh);\n }\nif (force === true) {\n display_loaded();\n }} else if (Date.now() < root._bokeh_timeout) {\n setTimeout(run_inline_js, 100);\n } else if (!root._bokeh_failed_load) {\n console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n root._bokeh_failed_load = true;\n } else if (force !== true) {\n const cell = $(document.getElementById(\"1107\")).parents('.cell').data().cell;\n cell.output_area.append_execute_result(NB_LOAD_WARNING)\n }\n }\n\n if (root._bokeh_is_loading === 0) {\n console.debug(\"Bokeh: BokehJS loaded, going straight to plotting\");\n run_inline_js();\n } else {\n load_libs(css_urls, js_urls, function() {\n console.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n run_inline_js();\n });\n }\n}(window));", + "application/vnd.bokehjs_load.v0+json": "" + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "img_div = '''\n", + "
\n", + " \n", + "
\n", + "'''\n", + "TOOLTIPS = f\"\"\"\n", + "
\n", + " {img_div if use_img_div else ''}\n", + "
\n", + " @label_desc - @split\n", + " [#@video_id]\n", + "
\n", + "
\n", + " \n", + "\"\"\"\n", + "cmap = LinearColorMapper(palette=\"Turbo256\", low=0, high=len(set_labels))\n", + "\n", + "output_notebook()\n", + "# or \n", + "# output_file(\"scatter_plot.html\")\n", + "\n", + "p = figure(width=1000,\n", + " height=800,\n", + " tooltips=TOOLTIPS,\n", + " title=f\"Check {'video' if use_img_div else 'label'} by hovering mouse over the dots\")" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "ead4daf7", + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "
\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/javascript": "(function(root) {\n function embed_document(root) {\n const docs_json = {\"458dc79d-472d-4f36-92df-cc9c2d7d3fb7\":{\"defs\":[],\"roots\":{\"references\":[{\"attributes\":{\"below\":[{\"id\":\"1119\"}],\"center\":[{\"id\":\"1122\"},{\"id\":\"1126\"}],\"height\":800,\"left\":[{\"id\":\"1123\"}],\"renderers\":[{\"id\":\"1148\"}],\"title\":{\"id\":\"1109\"},\"toolbar\":{\"id\":\"1135\"},\"width\":1000,\"x_range\":{\"id\":\"1111\"},\"x_scale\":{\"id\":\"1115\"},\"y_range\":{\"id\":\"1113\"},\"y_scale\":{\"id\":\"1117\"}},\"id\":\"1108\",\"subtype\":\"Figure\",\"type\":\"Plot\"},{\"attributes\":{},\"id\":\"1128\",\"type\":\"WheelZoomTool\"},{\"attributes\":{},\"id\":\"1165\",\"type\":\"BasicTickFormatter\"},{\"attributes\":{},\"id\":\"1163\",\"type\":\"AllLabels\"},{\"attributes\":{},\"id\":\"1124\",\"type\":\"BasicTicker\"},{\"attributes\":{\"data\":{\"label\":[53,51,57,89,19,27,51,42,18,18,60,61,94,35,72,51,80,1,40,50,10,38,87,62,19,36,49,32,75,72,98,7,38,59,14,59,3,94,68,67,2,30,63,3,8,28,82,80,35,46,19,57,82,9,51,69,69,38,5,3,42,26,89,1,92,85,33,92,53,34,42,52,51,18,49,50,22,89,7,24,94,73,31,61,63,23,88,73,76,80,0,98,11,13,82,47,65,73,66,57,71,52,87,62,3,19,89,82,68,34,95,2,34,93,22,93,5,58,92,86,31,52,10,42,36,13,42,1,35,23,89,16,7,9,59,13,45,67,48,74,53,26,11,36,92,63,87,48,49,16,73,7,87,94,93,97,61,44,97,6,80,60,13,59,47,53,5,18,10,13,70,19,35,55,30,81,99,7,86,40,94,12,15,46,0,88,31,62,13,20,65,19,34,7,81,84,48,67,38,78,91,41,2,75,49,43,26,44,18,94,34,26,42,87,93,41,7,76,56,1,19,67,40,16,60,10,81,85,32,39,64,25,62,14,28,39,99,88,73,53,71,14,69,67,3,73,53,83,2,45,85,58,16,32,35,37,16,62,3,4,76,15,17,48,37,99,64,47,69,95,69,5,72,44,24,49,66,61,8,38,49,33,40,53,24,92,21,40,47,50,97,34,57,26,80,72,19,31,5,10,98,81,88,25,11,11,40,82,79,81,25,70,37,73,12,72,15,12,99,48,71,81,3,4,50,32,3,91,50,97,74,9,45,46,41,67,63,37,57,50,8,66,83,13,97,34,54,83,2,48,18,51,5,
60,42,5,97,27,20,27,70,37,37,36,51,88,11,90,10,39,85,44,18,41,98,3,29,72,49,96,10,21,39,91,3,15,22,21,79,32,52,92,45,48,29,50,38,70,26,3,67,17,23,51,32,45,66,61,98,17,49,69,68,43,44,5,82,20,84,8,27,80,46,22,70,44,33,50,77,31,32,34,64,39,4,55,16,36,76,59,45,25,70,95,21,66,76,1,35,9,85,57,99,69,50,5,90,90,20,78,96,12,43,70,4,78,93,35,17,35,55,52,99,23,3,10,36,75,52,91,76,35,86,74,78,37,5,28,17,69,10,2,47,95,77,70,36,80,93,43,91,14,22,13,90,71,0,27,55,65,54,40,9,26,0,77,75,23,30,11,22,55,13,33,59,13,56,66,14,68,16,9,78,75,61,85,82,52,66,12,51,38,15,51,8,0,22,83,57,41,48,99,28,11,91,55,12,15,84,9,22,24,41,21,73,64,27,88,1,60,22,64,52,67,33,35,92,17,63,4,2,82,65,56,17,40,52,67,40,77,54,88,29,56,25,42,23,38,7,82,64,44,42,18,20,19,14,29,71,33,69,46,41,59,94,33,49,76,11,42,64,96,66,32,1,12,79,75,15,6,35,2,4,24,27,44,93,23,22,62,50,72,45,78,55,87,58,84,63,6,96,1,9,41,32,65,24,30,73,71,65,87,30,78,8,54,83,77,20,29,68,32,71,27,98,0,80,24,23,34,7,10,9,6,58,14,7,54,8,72,17,0,69,54,85,75,6,85,47,87,72,71,47,68,44,6,56,16,31,33,96,77,86,43,12,49,39,92,26,24,21,5,59,58,4,93,90,86,30,8,40,59,55,79,18,21,70,15,60,86,63,45,29,43,84,89,64,15,45,55,2,47,41,23,48,53,66,4,85,65,15,39,65,84,12,12,42,66,19,40,30,35,53,65,23,1,48,86,90,30,57,43,66,68,5,68,9,5,11,21,92,95,31,95,41,90,79,34,98,43,46,36,5,52,2,90,74,36,29,17,63,99,58,22,56,57,20,23,16,81,37,1,88,94,85,71,14,60,88,18,38,14,21,78,17,94,46,32,69,76,71,47,36,90,37,25,54,98,35,74,17,27,1,18,8,74,3,42,77,23,96,47,58,20,74,5,44,54,34,82,54,31,33,43,2,63,68,83,61,9,0,57,33,54,14,27,79,56,26,45,28,81,55,68,75,85,27,6,12,25,10,88,20,22,62,71,76,96,77,46,88,57,8,27,90,17,23,79,8,48,66,93,24,2,36,77,48,13,75,90,25,2,9,84,31,20,43,2,36,83,12,25,60,96,14,52,43,51,24,49,20,49,27,76,46,25,6,75,9,65,91,12,89,96,10,86,71,56,33,39,74,46,84,74,1,7,41,56,93,95,3,64,41,64,74,20,40,62,79,16,68,86,8,96,28,91,18,74,24,46,72,53,47,46,64,45,94,11,17,70,4,99,26,23,24,34,99,95,29,97,85,97,92,87,99,82,96,31,47,80,57,34,22,10,25,29,0,46,30,26,91,78,20,29,21,
57,62,19,24,38,98,2,67,63,19,32,44,60,24,75,39,1,59,77,83,39,2,28,83,13,84,79,67,97,14,50,93,49,31,84,79,29,84,48,59,68,56,74,10,50,25,30,19,83,31,58,89,64,3,39,37,58,83,29,95,53,26,16,1,25,97,81,68,92,21,42,72,7,13,8,9,91,1,2,79,4,89,23,58,5,11,15,38,28,81,62,5,32,63,53,50,3,7,61,67,14,98,54,1,1,14,18,85,99,69,46,43,20,63,97,37,85,59,58,94,47,22,97,23,9,5,13,87,72,11,22,52,0,13,11,22,0,70,1,39,9,38,29,96,7,14,51,71,74,22,17,48,41,90,61,97,5,36,42,83,45,91,32,68,76,84,78,66,49,67,63,29,5,9,19,50,10,79,51,86,80,2,76,88,7,89,15,4,46,47,41,57,39,16,81,63,17,82,71,27,3,49,64,50,95,61,87,1,21,73,74,24,48,25,44,19,8,20,68,19,6,28,59,98,46,32,53,9,30,76,90,75,73,79,77,38,36,58,83,1,16,46,14,21,35,10,7,96,73,18,70,42,93,24,61,72,64,13,15,58,76,31,65,59,82,90,38,32,42,99,57,33,37,10,69,95,10,41,26,45,33,80,21,51,41,68,22,47,26,88,71,17,24,99,74,24,14,91,47,13,52,36,34,18,36,60,55,27,98,12,57,18,65,60,64,16,79,99,98,40,12,44,1,81,93,48,77,23,62,72,8,35,54,5,94,59,75,28,67,30,67,23,97,12,15,92,84,17,3,60,86,37,27,2,49,49,2,12,13,20,56,74,2,25,14,52,55,84,75,23,91,95,29,16,33,44,92,93,54,29,24,6,4,3,26,45,0,43,44,40,82,9,20,21,11,69,82,5,87,86,70,4,56,34,19,92,48,53,56,34,30,57,75,28,77,88,3,55,66,8,89,53,7,27,25,40,93,40,50,68,25,27,78,35,43,67,66,2,2,54,85,71,37,42,66,63,32,38,34,65,62,45,26,51,39,23,17,81,72,10,83,62,31,53,85,52,25,8,31,80,1,90,89,96,64,11,5,50,31,54,94,78,55,68,34,30,3,35],\"label_desc\":[\"bird\",\"accident\",\"cow\",\"medicine\",\"black\",\"orange\",\"accident\",\"kiss\",\"all\",\"all\",\"doctor\",\"eat\",\"purple\",\"can\",\"short\",\"accident\",\"but\",\"drink\",\"hat\",\"wrong\",\"deaf\",\"fish\",\"jacket\",\"enjoy\",\"black\",\"dog\",\"white\",\"bed\",\"work\",\"short\",\"tell\",\"who\",\"fish\",\"dark\",\"thin\",\"dark\",\"before\",\"purple\",\"pizza\",\"pink\",\"computer\",\"what\",\"forget\",\"before\",\"candy\",\"table\",\"city\",\"but\",\"can\",\"shirt\",\"black\",\"cow\",\"city\",\"cousin\",\"accident\",\"play\",\"play\",\"fish\",\"go\",\"before
\",\"kiss\",\"now\",\"medicine\",\"drink\",\"paper\",\"full\",\"blue\",\"paper\",\"bird\",\"bowling\",\"kiss\",\"apple\",\"accident\",\"all\",\"white\",\"wrong\",\"hot\",\"medicine\",\"who\",\"many\",\"purple\",\"time\",\"woman\",\"eat\",\"forget\",\"like\",\"letter\",\"time\",\"africa\",\"but\",\"book\",\"tell\",\"fine\",\"no\",\"city\",\"study\",\"last\",\"time\",\"meet\",\"cow\",\"secretary\",\"apple\",\"jacket\",\"enjoy\",\"before\",\"black\",\"medicine\",\"city\",\"pizza\",\"bowling\",\"right\",\"computer\",\"bowling\",\"pull\",\"hot\",\"pull\",\"go\",\"dance\",\"paper\",\"how\",\"woman\",\"apple\",\"deaf\",\"kiss\",\"dog\",\"no\",\"kiss\",\"drink\",\"can\",\"like\",\"medicine\",\"year\",\"who\",\"cousin\",\"dark\",\"no\",\"man\",\"pink\",\"tall\",\"want\",\"bird\",\"now\",\"fine\",\"dog\",\"paper\",\"forget\",\"jacket\",\"tall\",\"white\",\"year\",\"time\",\"who\",\"jacket\",\"purple\",\"pull\",\"son\",\"eat\",\"later\",\"son\",\"clothes\",\"but\",\"doctor\",\"no\",\"dark\",\"study\",\"bird\",\"go\",\"all\",\"deaf\",\"no\",\"school\",\"black\",\"can\",\"color\",\"what\",\"cheat\",\"thursday\",\"who\",\"how\",\"hat\",\"purple\",\"help\",\"walk\",\"shirt\",\"book\",\"letter\",\"woman\",\"enjoy\",\"no\",\"cool\",\"last\",\"black\",\"bowling\",\"who\",\"cheat\",\"decide\",\"tall\",\"pink\",\"fish\",\"birthday\",\"paint\",\"hearing\",\"computer\",\"work\",\"white\",\"language\",\"now\",\"later\",\"all\",\"purple\",\"bowling\",\"now\",\"kiss\",\"jacket\",\"pull\",\"hearing\",\"who\",\"africa\",\"corn\",\"drink\",\"black\",\"pink\",\"hat\",\"year\",\"doctor\",\"deaf\",\"cheat\",\"full\",\"bed\",\"graduate\",\"give\",\"mother\",\"enjoy\",\"thin\",\"table\",\"graduate\",\"thursday\",\"letter\",\"time\",\"bird\",\"secretary\",\"thin\",\"play\",\"pink\",\"before\",\"time\",\"bird\",\"cook\",\"computer\",\"man\",\"full\",\"dance\",\"year\",\"bed\",\"can\",\"family\",\"year\",\"enjoy\",\"before\",\"chair\",\"africa\",\"walk\",\"yes\",\"tall\",\"family\",\"thursday\",\"give
\",\"study\",\"play\",\"right\",\"play\",\"go\",\"short\",\"later\",\"many\",\"white\",\"meet\",\"eat\",\"candy\",\"fish\",\"white\",\"blue\",\"hat\",\"bird\",\"many\",\"paper\",\"finish\",\"hat\",\"study\",\"wrong\",\"son\",\"bowling\",\"cow\",\"now\",\"but\",\"short\",\"black\",\"woman\",\"go\",\"deaf\",\"tell\",\"cheat\",\"letter\",\"mother\",\"fine\",\"fine\",\"hat\",\"city\",\"brown\",\"cheat\",\"mother\",\"school\",\"family\",\"time\",\"help\",\"short\",\"walk\",\"help\",\"thursday\",\"tall\",\"secretary\",\"cheat\",\"before\",\"chair\",\"wrong\",\"bed\",\"before\",\"paint\",\"wrong\",\"son\",\"want\",\"cousin\",\"man\",\"shirt\",\"hearing\",\"pink\",\"forget\",\"family\",\"cow\",\"wrong\",\"candy\",\"meet\",\"cook\",\"no\",\"son\",\"bowling\",\"change\",\"cook\",\"computer\",\"tall\",\"all\",\"accident\",\"go\",\"doctor\",\"kiss\",\"go\",\"son\",\"orange\",\"cool\",\"orange\",\"school\",\"family\",\"family\",\"dog\",\"accident\",\"letter\",\"fine\",\"need\",\"deaf\",\"graduate\",\"full\",\"later\",\"all\",\"hearing\",\"tell\",\"before\",\"thanksgiving\",\"short\",\"white\",\"same\",\"deaf\",\"finish\",\"graduate\",\"paint\",\"before\",\"walk\",\"hot\",\"finish\",\"brown\",\"bed\",\"apple\",\"paper\",\"man\",\"tall\",\"thanksgiving\",\"wrong\",\"fish\",\"school\",\"now\",\"before\",\"pink\",\"yes\",\"like\",\"accident\",\"bed\",\"man\",\"meet\",\"eat\",\"tell\",\"yes\",\"white\",\"play\",\"pizza\",\"language\",\"later\",\"go\",\"city\",\"cool\",\"decide\",\"candy\",\"orange\",\"but\",\"shirt\",\"hot\",\"school\",\"later\",\"blue\",\"wrong\",\"basketball\",\"woman\",\"bed\",\"bowling\",\"give\",\"graduate\",\"chair\",\"color\",\"year\",\"dog\",\"africa\",\"dark\",\"man\",\"mother\",\"school\",\"right\",\"finish\",\"meet\",\"africa\",\"drink\",\"can\",\"cousin\",\"full\",\"cow\",\"thursday\",\"play\",\"wrong\",\"go\",\"need\",\"need\",\"cool\",\"birthday\",\"same\",\"help\",\"language\",\"school\",\"chair\",\"birthday\",\"pull\",\"can\",\"yes\",\"can\",\"color\"
,\"apple\",\"thursday\",\"like\",\"before\",\"deaf\",\"dog\",\"work\",\"apple\",\"paint\",\"africa\",\"can\",\"how\",\"want\",\"birthday\",\"family\",\"go\",\"table\",\"yes\",\"play\",\"deaf\",\"computer\",\"study\",\"right\",\"basketball\",\"school\",\"dog\",\"but\",\"pull\",\"language\",\"paint\",\"thin\",\"hot\",\"no\",\"need\",\"secretary\",\"book\",\"orange\",\"color\",\"last\",\"change\",\"hat\",\"cousin\",\"now\",\"book\",\"basketball\",\"work\",\"like\",\"what\",\"fine\",\"hot\",\"color\",\"no\",\"blue\",\"dark\",\"no\",\"corn\",\"meet\",\"thin\",\"pizza\",\"year\",\"cousin\",\"birthday\",\"work\",\"eat\",\"full\",\"city\",\"apple\",\"meet\",\"help\",\"accident\",\"fish\",\"walk\",\"accident\",\"candy\",\"book\",\"hot\",\"cook\",\"cow\",\"hearing\",\"tall\",\"thursday\",\"table\",\"fine\",\"paint\",\"color\",\"help\",\"walk\",\"decide\",\"cousin\",\"hot\",\"many\",\"hearing\",\"finish\",\"time\",\"give\",\"orange\",\"letter\",\"drink\",\"doctor\",\"hot\",\"give\",\"apple\",\"pink\",\"blue\",\"can\",\"paper\",\"yes\",\"forget\",\"chair\",\"computer\",\"city\",\"last\",\"corn\",\"yes\",\"hat\",\"apple\",\"pink\",\"hat\",\"basketball\",\"change\",\"letter\",\"thanksgiving\",\"corn\",\"mother\",\"kiss\",\"like\",\"fish\",\"who\",\"city\",\"give\",\"later\",\"kiss\",\"all\",\"cool\",\"black\",\"thin\",\"thanksgiving\",\"secretary\",\"blue\",\"play\",\"shirt\",\"hearing\",\"dark\",\"purple\",\"blue\",\"white\",\"africa\",\"fine\",\"kiss\",\"give\",\"same\",\"meet\",\"bed\",\"drink\",\"help\",\"brown\",\"work\",\"walk\",\"clothes\",\"can\",\"computer\",\"chair\",\"many\",\"orange\",\"later\",\"pull\",\"like\",\"hot\",\"enjoy\",\"wrong\",\"short\",\"man\",\"birthday\",\"color\",\"jacket\",\"dance\",\"decide\",\"forget\",\"clothes\",\"same\",\"drink\",\"cousin\",\"hearing\",\"bed\",\"last\",\"many\",\"what\",\"time\",\"secretary\",\"last\",\"jacket\",\"what\",\"birthday\",\"candy\",\"change\",\"cook\",\"basketball\",\"cool\",\"thanksgiving\",\"pizza\",\"bed\",\"secre
tary\",\"orange\",\"tell\",\"book\",\"but\",\"many\",\"like\",\"bowling\",\"who\",\"deaf\",\"cousin\",\"clothes\",\"dance\",\"thin\",\"who\",\"change\",\"candy\",\"short\",\"yes\",\"book\",\"play\",\"change\",\"full\",\"work\",\"clothes\",\"full\",\"study\",\"jacket\",\"short\",\"secretary\",\"study\",\"pizza\",\"later\",\"clothes\",\"corn\",\"year\",\"woman\",\"blue\",\"same\",\"basketball\",\"how\",\"language\",\"help\",\"white\",\"graduate\",\"paper\",\"now\",\"many\",\"finish\",\"go\",\"dark\",\"dance\",\"chair\",\"pull\",\"need\",\"how\",\"what\",\"candy\",\"hat\",\"dark\",\"color\",\"brown\",\"all\",\"finish\",\"school\",\"walk\",\"doctor\",\"how\",\"forget\",\"man\",\"thanksgiving\",\"language\",\"decide\",\"medicine\",\"give\",\"walk\",\"man\",\"color\",\"computer\",\"study\",\"hearing\",\"like\",\"tall\",\"bird\",\"meet\",\"chair\",\"full\",\"last\",\"walk\",\"graduate\",\"last\",\"decide\",\"help\",\"help\",\"kiss\",\"meet\",\"black\",\"hat\",\"what\",\"can\",\"bird\",\"last\",\"like\",\"drink\",\"tall\",\"how\",\"need\",\"what\",\"cow\",\"language\",\"meet\",\"pizza\",\"go\",\"pizza\",\"cousin\",\"go\",\"fine\",\"finish\",\"paper\",\"right\",\"woman\",\"right\",\"hearing\",\"need\",\"brown\",\"bowling\",\"tell\",\"language\",\"shirt\",\"dog\",\"go\",\"apple\",\"computer\",\"need\",\"want\",\"dog\",\"thanksgiving\",\"yes\",\"forget\",\"thursday\",\"dance\",\"hot\",\"corn\",\"cow\",\"cool\",\"like\",\"year\",\"cheat\",\"family\",\"drink\",\"letter\",\"purple\",\"full\",\"secretary\",\"thin\",\"doctor\",\"letter\",\"all\",\"fish\",\"thin\",\"finish\",\"birthday\",\"yes\",\"purple\",\"shirt\",\"bed\",\"play\",\"africa\",\"secretary\",\"study\",\"dog\",\"need\",\"family\",\"mother\",\"change\",\"tell\",\"can\",\"want\",\"yes\",\"orange\",\"drink\",\"all\",\"candy\",\"want\",\"before\",\"kiss\",\"basketball\",\"like\",\"same\",\"study\",\"dance\",\"cool\",\"want\",\"go\",\"later\",\"change\",\"bowling\",\"city\",\"change\",\"woman\",\"blue\",\"language\",\"comp
uter\",\"forget\",\"pizza\",\"cook\",\"eat\",\"cousin\",\"book\",\"cow\",\"blue\",\"change\",\"thin\",\"orange\",\"brown\",\"corn\",\"now\",\"man\",\"table\",\"cheat\",\"color\",\"pizza\",\"work\",\"full\",\"orange\",\"clothes\",\"help\",\"mother\",\"deaf\",\"letter\",\"cool\",\"hot\",\"enjoy\",\"secretary\",\"africa\",\"same\",\"basketball\",\"shirt\",\"letter\",\"cow\",\"candy\",\"orange\",\"need\",\"yes\",\"like\",\"brown\",\"candy\",\"tall\",\"meet\",\"pull\",\"many\",\"computer\",\"dog\",\"basketball\",\"tall\",\"no\",\"work\",\"need\",\"mother\",\"computer\",\"cousin\",\"decide\",\"woman\",\"cool\",\"language\",\"computer\",\"dog\",\"cook\",\"help\",\"mother\",\"doctor\",\"same\",\"thin\",\"apple\",\"language\",\"accident\",\"many\",\"white\",\"cool\",\"white\",\"orange\",\"africa\",\"shirt\",\"mother\",\"clothes\",\"work\",\"cousin\",\"last\",\"paint\",\"help\",\"medicine\",\"same\",\"deaf\",\"how\",\"secretary\",\"corn\",\"blue\",\"graduate\",\"want\",\"shirt\",\"decide\",\"want\",\"drink\",\"who\",\"hearing\",\"corn\",\"pull\",\"right\",\"before\",\"give\",\"hearing\",\"give\",\"want\",\"cool\",\"hat\",\"enjoy\",\"brown\",\"year\",\"pizza\",\"how\",\"candy\",\"same\",\"table\",\"paint\",\"all\",\"want\",\"many\",\"shirt\",\"short\",\"bird\",\"study\",\"shirt\",\"give\",\"man\",\"purple\",\"fine\",\"yes\",\"school\",\"chair\",\"thursday\",\"now\",\"like\",\"many\",\"bowling\",\"thursday\",\"right\",\"thanksgiving\",\"son\",\"full\",\"son\",\"paper\",\"jacket\",\"thursday\",\"city\",\"same\",\"woman\",\"study\",\"but\",\"cow\",\"bowling\",\"hot\",\"deaf\",\"mother\",\"thanksgiving\",\"book\",\"shirt\",\"what\",\"now\",\"paint\",\"birthday\",\"cool\",\"thanksgiving\",\"finish\",\"cow\",\"enjoy\",\"black\",\"many\",\"fish\",\"tell\",\"computer\",\"pink\",\"forget\",\"black\",\"bed\",\"later\",\"doctor\",\"many\",\"work\",\"graduate\",\"drink\",\"dark\",\"basketball\",\"cook\",\"graduate\",\"computer\",\"table\",\"cook\",\"no\",\"decide\",\"brown\",\"pink\",\"so
n\",\"thin\",\"wrong\",\"pull\",\"white\",\"woman\",\"decide\",\"brown\",\"thanksgiving\",\"decide\",\"tall\",\"dark\",\"pizza\",\"corn\",\"want\",\"deaf\",\"wrong\",\"mother\",\"what\",\"black\",\"cook\",\"woman\",\"dance\",\"medicine\",\"give\",\"before\",\"graduate\",\"family\",\"dance\",\"cook\",\"thanksgiving\",\"right\",\"bird\",\"now\",\"year\",\"drink\",\"mother\",\"son\",\"cheat\",\"pizza\",\"paper\",\"finish\",\"kiss\",\"short\",\"who\",\"no\",\"candy\",\"cousin\",\"paint\",\"drink\",\"computer\",\"brown\",\"chair\",\"medicine\",\"like\",\"dance\",\"go\",\"fine\",\"walk\",\"fish\",\"table\",\"cheat\",\"enjoy\",\"go\",\"bed\",\"forget\",\"bird\",\"wrong\",\"before\",\"who\",\"eat\",\"pink\",\"thin\",\"tell\",\"change\",\"drink\",\"drink\",\"thin\",\"all\",\"full\",\"thursday\",\"play\",\"shirt\",\"language\",\"cool\",\"forget\",\"son\",\"family\",\"full\",\"dark\",\"dance\",\"purple\",\"study\",\"hot\",\"son\",\"like\",\"cousin\",\"go\",\"no\",\"jacket\",\"short\",\"fine\",\"hot\",\"apple\",\"book\",\"no\",\"fine\",\"hot\",\"book\",\"school\",\"drink\",\"graduate\",\"cousin\",\"fish\",\"thanksgiving\",\"same\",\"who\",\"thin\",\"accident\",\"secretary\",\"want\",\"hot\",\"yes\",\"tall\",\"hearing\",\"need\",\"eat\",\"son\",\"go\",\"dog\",\"kiss\",\"cook\",\"man\",\"paint\",\"bed\",\"pizza\",\"africa\",\"decide\",\"birthday\",\"meet\",\"white\",\"pink\",\"forget\",\"thanksgiving\",\"go\",\"cousin\",\"black\",\"wrong\",\"deaf\",\"brown\",\"accident\",\"how\",\"but\",\"computer\",\"africa\",\"letter\",\"who\",\"medicine\",\"walk\",\"chair\",\"shirt\",\"study\",\"hearing\",\"cow\",\"graduate\",\"year\",\"cheat\",\"forget\",\"yes\",\"city\",\"secretary\",\"orange\",\"before\",\"white\",\"give\",\"wrong\",\"right\",\"eat\",\"jacket\",\"drink\",\"finish\",\"time\",\"want\",\"many\",\"tall\",\"mother\",\"later\",\"black\",\"candy\",\"cool\",\"pizza\",\"black\",\"clothes\",\"table\",\"dark\",\"tell\",\"shirt\",\"bed\",\"bird\",\"cousin\",\"what\",\"africa\",\"need\"
,\"work\",\"time\",\"brown\",\"basketball\",\"fish\",\"dog\",\"dance\",\"cook\",\"drink\",\"year\",\"shirt\",\"thin\",\"finish\",\"can\",\"deaf\",\"who\",\"same\",\"time\",\"all\",\"school\",\"kiss\",\"pull\",\"many\",\"eat\",\"short\",\"give\",\"no\",\"walk\",\"dance\",\"africa\",\"woman\",\"last\",\"dark\",\"city\",\"need\",\"fish\",\"bed\",\"kiss\",\"thursday\",\"cow\",\"blue\",\"family\",\"deaf\",\"play\",\"right\",\"deaf\",\"hearing\",\"now\",\"man\",\"blue\",\"but\",\"finish\",\"accident\",\"hearing\",\"pizza\",\"hot\",\"study\",\"now\",\"letter\",\"secretary\",\"yes\",\"many\",\"thursday\",\"want\",\"many\",\"thin\",\"paint\",\"study\",\"no\",\"apple\",\"dog\",\"bowling\",\"all\",\"dog\",\"doctor\",\"color\",\"orange\",\"tell\",\"help\",\"cow\",\"all\",\"last\",\"doctor\",\"give\",\"year\",\"brown\",\"thursday\",\"tell\",\"hat\",\"help\",\"later\",\"drink\",\"cheat\",\"pull\",\"tall\",\"basketball\",\"like\",\"enjoy\",\"short\",\"candy\",\"can\",\"change\",\"go\",\"purple\",\"dark\",\"work\",\"table\",\"pink\",\"what\",\"pink\",\"like\",\"son\",\"help\",\"walk\",\"paper\",\"decide\",\"yes\",\"before\",\"doctor\",\"how\",\"family\",\"orange\",\"computer\",\"white\",\"white\",\"computer\",\"help\",\"no\",\"cool\",\"corn\",\"want\",\"computer\",\"mother\",\"thin\",\"apple\",\"color\",\"decide\",\"work\",\"like\",\"paint\",\"right\",\"thanksgiving\",\"year\",\"blue\",\"later\",\"paper\",\"pull\",\"change\",\"thanksgiving\",\"many\",\"clothes\",\"chair\",\"before\",\"now\",\"man\",\"book\",\"language\",\"later\",\"hat\",\"city\",\"cousin\",\"cool\",\"finish\",\"fine\",\"play\",\"city\",\"go\",\"jacket\",\"how\",\"school\",\"chair\",\"corn\",\"bowling\",\"black\",\"paper\",\"tall\",\"bird\",\"corn\",\"bowling\",\"what\",\"cow\",\"work\",\"table\",\"basketball\",\"letter\",\"before\",\"color\",\"meet\",\"candy\",\"medicine\",\"bird\",\"who\",\"orange\",\"mother\",\"hat\",\"pull\",\"hat\",\"wrong\",\"pizza\",\"mother\",\"orange\",\"birthday\",\"can\",\"language\",\"p
ink\",\"meet\",\"computer\",\"computer\",\"change\",\"full\",\"secretary\",\"family\",\"kiss\",\"meet\",\"forget\",\"bed\",\"fish\",\"bowling\",\"last\",\"enjoy\",\"man\",\"now\",\"accident\",\"graduate\",\"like\",\"yes\",\"cheat\",\"short\",\"deaf\",\"cook\",\"enjoy\",\"woman\",\"bird\",\"full\",\"apple\",\"mother\",\"candy\",\"woman\",\"but\",\"drink\",\"need\",\"medicine\",\"same\",\"give\",\"fine\",\"go\",\"wrong\",\"woman\",\"change\",\"purple\",\"birthday\",\"color\",\"pizza\",\"bowling\",\"what\",\"before\",\"can\"],\"split\":[\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",
\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",
\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",
\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",
\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",
\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",\"train\",
\"train\",\"train\",\"train\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"
val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\",\"val\"],\"video_id\":[6343,639,13697,35460,6476,40115,631,31757,1999,1992,17023,18323,70247,8949,70355,65009,8435,17722,69359,64089,14888,22121,30843,19259,6478,68035,63209,5632,63792,51236,57278,63227,22114,14685,57943,14669,5740,45438,42960,42835,12332,62968,65761,70348,68018,56552,10904,8429,8936,51069,6455,13698,10894,13633,629,69433,68133,22115,24970,5729,31753,70345,35453,17712,41037,23771,6839,68127,6334,65241,31758,2999,633,1998,63203,64091,28123,35462,63231,34829,45432,58499,63673,18332,68054,33266,68089,58502,1386,8431,70266,57273,21887,38536,10902,55369,32257,58498,35517,13695,50042,3000,30835,70051,5724,6474,35457,10899,42959,7400,48107,12320,7394,45271,28125,66351,24946,65434,41026,28205,63668,65085,14885,31755,17095,38544,31749,17727,65294,33269,35467,64222,63225,13636,14671,38524,34737,42841,66591,62251,70107,38982,21886,65507,41034,22954,30849,56844,63202,64221,58508,69534,308
31,45436,69439,66531,18325,32319,53268,11310,8437,17026,69411,14674,55370,6331,24948,1991,68033,38482,49595,6481,8945,11773,62987,10149,66637,63242,28204,26712,45443,27209,62164,51071,7075,32954,66798,19267,38530,13196,32263,6472,7395,63239,65341,65449,56846,42829,22124,6355,40837,26974,12335,63789,63208,68085,39000,32322,1988,45434,7402,39003,31750,30834,45265,26972,63232,1384,65408,17713,6477,42830,26719,64212,17016,14900,10158,23766,5638,25318,24643,36932,68046,57942,56564,25324,58370,32946,58497,6332,50036,57950,43180,66296,5730,58503,6341,13160,12316,34732,23774,70152,64210,65162,8955,20983,64209,19264,5731,9849,1385,62169,64281,56849,20987,58367,24651,68162,43173,48108,43170,24954,51225,66014,34826,69533,68099,18331,8927,22109,63214,6843,26721,6335,34839,41008,68050,26717,66575,64094,53279,7389,13699,38994,8424,51233,6484,63672,24955,14898,57282,65342,32949,36946,68048,21883,26715,10901,7961,10160,36930,49577,20982,69511,27216,51223,62163,27215,58368,56850,50040,10157,5744,65328,64085,5637,5728,40847,64086,53273,62257,13641,66099,51056,26983,70246,70376,20981,13681,70132,8926,66112,13154,38534,53269,7391,9953,13158,12336,56843,1987,625,24857,17014,31756,24973,53274,69422,13200,40117,69455,20986,20989,17097,632,32953,21869,37891,14883,25333,23782,32338,2000,26975,57284,5746,57641,51224,63212,49186,14896,21943,25323,40841,68007,62168,28110,21954,7963,65161,65084,41025,69395,56845,57632,64097,22130,49599,39006,5734,68132,69546,33279,635,5642,34734,35523,18333,57277,64295,63211,43166,42974,32163,32337,24941,10893,13198,70119,8916,40114,8433,51067,69368,49597,32320,6833,64088,5233,63679,5639,7401,24648,25326,9869,11772,66816,17085,1392,14676,34743,36934,49596,48120,70361,35520,65029,17734,8938,13640,23775,13710,58361,43174,64090,24960,37883,37886,13197,6363,49178,69364,32167,49602,9855,6360,45261,8952,64294,69257,11777,3002,58362,33274,5739,14895,17091,63806,3001,40835,1388,8950,28201,62249,6368,20978,24971,70323,64280,43169,14884,12329,55365,48126,69225,49598,6550
6,68016,45252,32158,40844,57919,28115,38532,37889,50048,7076,40126,11768,32260,9957,26688,13646,39001,70212,5234,63803,33273,62984,21872,28107,11752,38541,6822,14675,38527,13333,35515,57934,42966,64224,13632,6366,63793,69307,65792,10900,68003,35521,27213,634,22126,62152,618,8917,7099,28108,13167,13696,68070,56839,66639,56563,21885,40836,11767,27217,62160,15037,13635,28112,34825,26980,21942,68171,69343,40122,32959,68042,17020,28109,24638,3003,69430,69238,8942,41029,64293,22952,9851,12330,10888,32255,69282,64287,26726,3008,42840,26739,5241,9945,32945,57647,65409,69402,31765,33283,22120,63229,10892,24645,32325,31767,1995,13216,65200,57939,57638,50039,6842,43175,51064,26981,69290,69440,6834,63205,1387,21890,31759,24642,49184,35506,5641,17723,65891,7973,63799,62170,11316,8948,12328,9854,34830,70310,32333,45270,33285,28116,19265,64082,51227,66098,70359,11770,30832,14623,15035,22965,11309,49176,17721,13648,70016,5628,32246,34824,62975,58488,50037,32248,30842,62988,6365,8921,9968,13156,5227,69281,57631,42972,5630,50043,40123,57283,69241,8434,34823,33268,7397,63236,14855,13643,68024,14633,57940,68183,70379,8929,51231,64275,7074,43179,9967,23772,63790,11311,23776,55366,30840,51232,50050,55364,66297,32335,11305,13323,68190,63669,6835,49181,5231,28214,32154,27219,63210,25339,70211,39002,34822,21951,24965,65440,68032,9856,45262,37882,28202,62964,8920,26724,65439,11774,7967,65043,21949,70207,68177,17017,28203,22962,34742,57628,32164,15034,35458,24636,62172,34738,11780,68028,55361,26976,33271,56842,6337,35512,70230,23767,32249,62171,25329,32254,15039,27207,27214,31746,35514,6471,26713,69531,8951,65187,68086,33282,17710,56841,69370,37892,62970,13704,66008,35513,42962,24950,42969,13630,69345,21870,21944,41035,48106,63666,48124,26986,37888,7957,65242,57291,32156,51072,69298,24943,69213,12313,37881,62254,17083,57629,64284,22953,58359,14628,28122,13325,13707,65403,69389,64211,10161,20976,17729,32956,45435,23768,50052,66606,17015,70325,1986,22116,57936,21933,6367,64292,45440,51054,5634,
43176,1382,50041,55375,17076,70237,20992,36929,9950,57288,8947,68178,64283,40119,17709,69206,8915,62253,5733,31754,65145,68093,49180,55368,14625,13214,62259,24952,32324,9966,7393,69269,9960,63665,6845,66007,12317,68053,42953,70030,18316,13634,68011,13706,6840,9955,57941,40124,65263,13334,38990,34746,56560,10146,11775,42967,63802,23777,40121,11315,65889,36939,14899,32951,13201,28121,19258,50044,1398,49179,5230,51066,32950,13701,8909,40118,37887,68192,70299,7962,65300,66592,35516,45267,34834,12319,17087,5232,56837,38540,63788,37884,36942,12315,13638,15031,63667,13209,68084,12318,17092,13165,27218,36944,70049,49173,57953,3005,32160,626,34831,63201,13202,63207,40116,1391,51063,36933,11314,63795,13639,32253,40843,27206,35455,49185,14886,28210,50046,13337,6841,70176,62241,51060,15043,62247,17730,63237,26985,13329,45268,48117,5732,24655,26973,24660,62245,13213,26714,19261,7971,64219,42958,28211,70326,49183,56567,40840,1912,69524,34828,51070,51220,6339,55373,51068,24639,34744,68137,21878,64296,68145,9850,58366,38991,33286,34837,7399,58363,48115,57642,70335,70029,53270,41032,30841,58365,10898,49182,63662,55372,8426,13705,7392,70270,14893,36938,57639,7069,51061,62966,69413,40816,6371,13199,57643,21952,69283,19269,6480,34833,22117,57276,12312,42831,22960,6486,5631,66015,17018,34836,63804,25330,17733,14683,5243,13161,25321,12311,56566,68029,38539,15033,69252,42836,53271,57935,64092,45264,70245,63664,65450,7968,57634,15040,56852,14672,42977,13328,62250,14887,64087,36940,62982,69236,13164,63677,14621,35463,24657,5727,25325,20980,14624,13162,57635,68142,6330,39004,66818,17728,36941,53277,10159,42963,41033,21950,31764,51221,63226,38533,8918,13637,40845,17711,12326,70242,68019,35454,33277,14627,24951,21891,62159,22125,56557,10166,19266,24962,65163,22963,69233,64095,5741,63240,18324,42833,57948,57287,9956,69302,17731,57949,1996,23778,66638,43167,51059,32155,70271,22964,53275,20988,23779,14673,14630,45433,55363,28120,53258,33278,65415,24940,38531,68079,51226,21884,28119,70309,7070,385
29,70234,28074,68012,49600,65540,25322,13647,22113,57633,49188,63238,57937,623,66441,62248,28111,64297,56838,26971,37890,18335,66532,24947,17093,31762,13157,66097,40842,5636,69431,1393,15038,6369,35511,63191,42843,22961,57645,24961,13642,6482,64093,65445,7966,627,70295,8425,12338,1395,32947,63228,35452,62158,9848,51081,55362,65884,13708,65843,64213,10151,22955,64288,10896,50038,66246,5749,68182,24649,64084,48105,68044,30830,17720,21955,66644,62256,69396,56848,36936,32326,6483,65298,13203,42971,70244,11313,56556,14680,57285,51057,5629,6326,13631,62944,1383,37885,63791,70356,7960,5238,69325,17090,14622,13168,70173,64218,51058,57933,21941,8937,14894,63219,49174,58504,68001,49606,31763,45263,34832,18329,51235,24652,68110,62173,14631,1394,63675,66010,14682,10895,37879,65731,5644,31752,70026,13703,65216,69316,14903,43168,48109,14882,26984,38999,34733,6832,8421,21953,628,26982,42964,28118,55371,68114,32955,50045,64291,34827,66640,62246,70308,66607,40834,55356,38525,2997,17084,7383,1997,17086,17019,68027,68122,57286,65890,13702,2003,32250,17013,24641,64201,7969,58360,57289,26723,27208,32321,17724,10148,70378,56840,5229,33267,19255,51206,8925,8935,9970,24956,66355,14681,66804,56579,42827,62967,42832,33281,53276,27221,62175,41030,15041,64300,5747,17007,28212,20979,70249,12333,63204,63200,12327,27194,38538,13217,13326,62244,12314,36927,57947,3006,11778,15032,63769,33270,68125,48114,57630,70306,68010,32334,41028,45269,9963,57640,34835,11330,70263,5750,38995,34736,7068,32146,32323,26722,65362,70332,13208,21945,21874,43171,65363,24969,30833,28187,49603,9847,13309,7390,6473,41027,56835,6340,13327,7388,62965,13700,63801,56558,5239,32948,5743,11769,35509,8924,35456,6338,63230,40130,36931,26741,45273,68068,70296,42956,66147,40129,6359,8944,32157,42838,35518,12331,12306,9949,23773,50049,65677,31751,35519,22967,5633,22127,7396,32261,19260,34685,38997,624,25332,33280,64298,10147,66469,70015,13155,19257,68187,6333,23769,65086,36937,8919,63676,8432,17725,37894,35461,49175,24640,21871,6582
4,68189,66799,9954,45439,70357,69274,42961,7398,62979,5742,8946],\"x\":{\"__ndarray__\":\"TfY7QVYQC8FdZHpBebgoQaGjIsFc+ZDA3zPVQdhVgcGzihK/hLGwPj3knUGz7QPCe74Lwon6CUIErTfBODP5wLKrj8CHSAXBY+xdQYcGa8Fp+4ZAznt7P+atEUI9vqvA5/IawrUNdMEdPfTBNcyvwfgrFELFpy7B5PWrwTQT08FyURDAvpbnQEEbCz9GbaBAgeWuwLpoDcIXZB7B54PAwd5FicDJxvRBBC++wd6GosHbhQfCItfMQTsZ4sBdRofA+3ECQtW/G8D1MLzA9xYhQlwZW8DChYjBVsMMwcS07UEEqvZBYdq+wDMfmUGD0dNBAQSIwc9mCEJpcCRBXMdVwZSQoj/87FtBIHyiwagUHUBW9SNB+QeYP70HicFzr/bAlUBdwTlK8T8FP5PBfH5mwdWj3MFZdCBBK4TAwW3Q3UH7dsM/dT0HQiMS/8E6/wPCTL2ewYTnBsIwHwhCPsEHQtUmw8B7T2lBRtCdQcldrMED+sPBat2LQctaxcB6f71BxQCbwVNQCUJ0mGtB6zVxQSk6k0GzJvbAAiQQQt6tBMHWoIHB1OYVwpXtIEHVZ7vAZQyJwSrBFUDgt77B1OggwDNFrjxLGBNCX3HmwcW9FEKvK4pBKufXQdd4Vz9The5B98bHwVncG0KBAudApFSOwNHxe8EQvQQ/2/KMwbpHU8EXuQNCJXr/wSz0KEFnoCBCscnOwY8RjMECIgJBdAT0PUGLicHe5ejB2RU7QSorB0KcrClBMVXxQSVvxsH5Jw3BCJqWPyJ8xcGA5RhC5Y82QQjN98GhKSFCS+UKQiNb0sHvBBFChtIHwg50FEJdNgDB/vkBwsr9esC35gXBsOK8QUDXR0FGEJpBrS/av/ULvUAAxcpBtYcsQUAVlEEfQp1BouyAQM7Btb/2t8lAEhEZwjprB0LtZpzB/OL/Qe8T+0F4WOHBS+3MwYv+80FpBBrCXb4KwuVBakF7AKpBTwjsv1P+o0F1rAxC3tzKwYoCysCBWVW/egbwwHVehsF+GxjC0tQDQJRk1cEbrANCD1c3QZyRX8FyBvDBHEVEwLfbo8EgD01BP9dmwQrcjcCkThxCs/CcwRXOxEHyof9B5DzcQNr0nL9HwwnCuw9YP5KaCkIqSfrBEfUKQtX0EUKsMXjBIgfKwZ+GN8HEw1LBm5dPwSl7NcFx/erBxQ0BQWVdI0KwSZdBh0WQQDceAkKJE3hB8kTRwfEQB0FGBYTA4JjLwauws8C/ieS9F2zCQbEOTcE3yebBzAUJQqK2CEJGjTFBIJ1vQRuVQr7eXPxBuT/owfl/1MAqQwpCgNc7Qbn7xD+upV7AINqDwR8TZ8GS4M1BnSYhQg+v4sEuUP9Bwru+wE4MI0IQmbfALBeFwb3Jp0GmRtLAlGOuQdAOqcBHbSJA5G+UwPuE1sFkZXjANN3JQSTR+EFY2a/BjOjzQUU5lkHQRjDBNFzgQGXfzUFebfbBbfg0Qe+HBsIt3M/BjOfZPrFZ88GR6+rBTmMIwtzoK0HpvdNB//c/QGJcxD/AOM7Ao6TNQTZqTcEidRHBLC2PP2UFikFJhwZCef44QZv6FME+MxnCjnCEwQmzlUE6uXxAxUy8wTJi/0Fe9QtCpsjHwe7+0MHH+IlBYeYWwo0olMBrZ6jB+VYAQl10xcGiX7pAEYajwM2KCkIRbnVBGLQfwWnkr0F0vXpB/9uMwXeZIkCR76ZBJHWwQcFZ7cC256lBUVhYwbyD2sFhRW7Bwj9UQYOYgsG5LwzBwWgNQimzecED1YbBX/7wv4rFgMFi0/bBsVHEwYpvkcDy7oJB3EJdwYGVBsL8ai1BY0ymPbsonr96XArBpF0cQVz7EELPDbE/WchpwPPKZkDGQ1e/l3hFwTA8oUHkbJxBt8FjwfO6nkEPwvbA6xO0wJ
T/kMEnb6fAbJ7qQA7wicA4MLPA+QuBwZiWD8Hg9g5Ca0rMwWc74b4JGQq/YNlvQUTGfEE9ktRAkOjWv6AzgcERM6nBqHV3wWMGs8GVpijBSY1gwSggEELSTYRA21jUwXdpgUE230hBGoihwXfMqkFEftbBaTZ/wdxqq8FO3tzBSHsBwSfhfUCH8YBBdPlfQc83H8Hy5H7BS2wywKlU20BakgtCLrejwUsk4cEGpqRACOv8wQTNBsFYf9bB8niBwbhGKkFPLgLCiAaswcec9kA7A/zBdNjzQazvOMG29MFBeLffQEUImkEqkLvAbZmqwYjMSUED8A7COZPSwA+Lf8DXxGnB5SzfwcJgtkBnp+dAqa7owW1SesEb1MlAhAPGwYUr28E4hw1AD0mVQcPCaEFJ1aRBN4KOwZx1JkKRyWvBBeM6wdBTDEHDwI3BBFZNv1LLsUAoEkLAT+WLP6FsM0H4ip3BuJdEwTt7BELXg3XB8OtcwfqogUHRYeHBZ9HyQQ+QZ8GKppLB9u8lvgTfb79XfZLBa+sTwlv+uUDbOGVBhfTOQbcGo0DB+KVB2Da3wV7nEkJStwdCy/H9QAaABkLXo/XB8I6twNG4uMECtJ3BBFjCP2AGjUCZuy5Am4YbQvnzB8Gv1SrAQS+lwW38BUIIrO5ByoUFQqf7FMK7+5zABGmmQYelEcKQPtpAFnr8QcwXRkCttP+/YVXLQRtV4L90pdFAMHfHQCqHdsEKCWRB4HnxQSQu1kFvQFxB6IMkQovdecFcdqI/4B5RvpNznEHC4qJB+ohewBCX8cFA8IPBiigQQvsn0MGF4ozBAJcHQlRlFMJ8B8FAg+8VQoCcwUBeJf1BHDDuwesq4sHF8ZXBbwqqP/ZElcEefutAAVkGPxRWSME7eX5BsgSuPqyvF8FfaSRCZsmHwaOzvMFp9RdCytIAwiYnbUHf+o7ABfL7wLY+NUF243pBcSzZQSzWpMDF2qhBXSnRQRQ06MHqGZ1BLojdwadfQkCZLHdBViaCwTM4SEBr3ufBvofOQbHexcHMzhrAWSDywSgtQEElR6RBWQgrQWxBisEa9ePBgHXFQUWln8EIVkg/F0kHQjbZosAZ2J3AjMWLQfcJWsGvd6NB1KriweTBmcGBZgjB+9TpwSmn48FPlAhC/0cUQJjlx0BmQ77BcjCkQbpO7r82JXbADyaNwYPMb8FoqTzBzucbwoNs5sDRZvDBTacbwkaxw0BnwQVCgAUPQkfoRMHVPSbBmgW/wQWZg8ELnf/BIlqnwEA4vcEQ+6vAAU+fwNbH6UAJSIjBtZnpv/nipsGophnCpOcjQjBng8EavZtBhWfiwafq90G8ZGfBy/GCwZzIBEGBaArCEz3bwYwr/cFVmK7AKcfFwR+/j8EVQIjAtPqwQG1DhUFaftzBzRCHwclobkH1X6zBQqQZQp0drkHtrMRBgWcFQsvQHcCmlqlB4ZXDQf1cgMCWGOxAvPgWQrIMAMLL7nnBgfXhwNQHXcHkZBzBS5GFwYqgvsE2eOnB6PUTQqW900H7fDhBR6W2wT0mw0Exas5AoZgHwfdzhMG/7xLCnDPUwfbcfsFIftdBCkcJQo5gCkKbEqNBd1SAQWgNFkLJ98fBXxehwZmGxMFG6g5ChpJFQEVy5kD3bh5Cash+wV1KL8Ehc+TBPzmYQTJAg8AQeZ7BXR2iQW2wSkFzZNdBUPcAwq/V4T+M0cTBwBuwwbrnhsHpYcFB9AzSQfc3mb9P38/BoQkOQpnuCcIJQ5hBjvJSwKCC00E9bfxBMlQSQmXcasHX3xpCzxS8QTbpmMHXVs1BxWgRQql1ocBVyadBXLHIQbBNHsHbTNdAXejGQQdtF8J6diBCqZXLwTxk8MFRT71A74C9QC269EGiyMNBojZaQUnr+MFgXWRBAYoIP9NhCEI2IMFB00+JwR+ioEFQPQVBro7ZQUs/qUFNCxNCf20lvp6r80FOt/5BiJDdwbbhGMI2Nv1AARwHwr
CTtsHC8OY9G4GGwcRQsUB8u6JB/hWnQbZe8UGUgMbB8xpvwQIzh8FfQ8FBaeo5QTzmN0HFfZXASlGwQZrDh8FSiPPB8OtvwNwmwkFVKXfBTcaYwQ7oYUHwvSNBzJQ5Qbd6oEEdwnVB1DOFwZzerEFCFnZBBjZ4webAQ0FiYWhBnE5kQYXChMGG3IlBr4sUwrK1FcKEPgBCD5MEQmCQP0EEHYHBCG8Cwsm6Z8FcbVbBkL/wQQN4SD9Ad/tBCal4QbPZx0GEWitBWxImwTe3eMEmA3bBu72NwXTQlEEP883BOKZGwHc9BUCG4Z/BKXbLwcQAp8FW9nHBi3Kev/nIRr8a759BHKnQwZMTwUFMau+/jsl/wQirZ0Fg2/DAZUEEwIDRPL9e8QlCw9lwwUeWN8HNqPpAbCa2wVU+oMF3T9RBNNPmwS6zKMHiKJFBhUO9wQFi/cGFGCNC3o8BQvslgsDSL9fBe5UPQrrlDcIU94dBA+6fQeG/+L56rp5BjL0QQnnvFb+slvG/IfIPwbL+nT6FHLXB8c7IQB5TC8KR6FrB+zLbwarV90GEdDBAR2ShQUAqxkFgqG7BP2CYv8CZ08B8+D6/wNkPQoAPtMEMyApCRg8OQi7RFEGTXYfAcHZOwfEOcL8N1QvCfW0KQoZpD8EB2YPBF2DbQOLjA8L3qfBAZxy7QYUV0UFA9rjB/i8PQmgZicHdEexAWtcJQtffrD/sdrnAEyoNQt8uy8EHhOrBeynKQTSRh8C+6cDBL/N1wA+0SUCya27BmPKMwVUgmsBvvYJBpeemwZpXEELQTue/CSxwwNbetsE2SGnB1pkIQlv4jMFW+M1Bn+P/QTn8BsLv2ra/ZJ0YQpxXYUEM7rjAkxfGQUK4aUEvq8fBcLNXQGcoDUIRQIPBpM7RwWHgu8DLE5NBcog1wSwRhkD3ZbZA2YZJwRIjFkKrAGFBz0egwQgvlMD/UIW/X2HpQOf/BcJHjrHBp83ZwQeOK0GzVopBKD0QQlvi1EGS/LPAr36GwQWjskBWOkNB0aWoP9LgGEIa88a/lunJwW+uGMBCOIrBKhwqQbLVpUBwyqXBdo3fQbFXYsCBN2HB5PiAP59YWkGFKsTBTo/aQFUDwkBBvAc/CsIOwTiDwUHhatpBdE5Pwaga9cFLBZPBZ5t7wYmCf8BrYjrBxaZoQVuUxMEmM7hBdeMVQqqOjMG2GYbBXVBMQVU7c0HXWSdBE7HCQJ5jhEATqYJB4AzRQGjCF8L3GebBN2thQYkXCEIOPmdBdFg3QTnuDkIEGUzBIQrFwfyveMGSWFzBRAEWQh0NpcHT+6DBWqWHwDZcX8Etu3rAhj4IQgMbb8EEvMnBMP2zwEUQwcE8HB5CLowcwSRy70Gt2wrC7K36QNTGx0EXuU9BgUW9v9yWCkKf3t1BZYz0v8ZrMMF7JThBWnnIQZpmcsGAeda+IcWLwWATCMIAO4DBFB3wQPyrvkDtUqxBQuqxwXtdBkITjAbC+125QU037j9D0uDB6SUjQlREccGwaADB/GpZQdk/CcG/1vE/LrAWQs9nZ8H5f17AUDPtQP6mbMGyLsZBezo+QS/CfUHZBBhBVA7XwfbV579pj8fBGtN3wc2lzb5wFlnAo+39QApaCELy12BBJMC4wQqsp8GhDEXBDjmzP39ghEEn+s7AcA4awsgNz0HKFdG/l2OlwXVqP8CXee7B/H2pwb8XGsJodNXBpFeJv8oqmUHhgNRBVgIaQoSyf0ErTkTBk/4PQeon10DuJug/YppmQc4WDMBp6c5BE38pQIv8uz5VjzpBDi65wbpS18EVHgXBp5NYP/QwXMH1O9hBP5ASwjmawsH+rkNB9mzDwZrhlsGNI0ZB5WM1Qff2qkBOdojBhCcewelLCkLKVx5AhpZpwTJUxcFWFQBC+y8Wwngj/r3yucjBgtvSQQySHUHuyLnAyW8HwbphiEFPPcHA6LjYQeRAOz9uw4/BZ2ylwYPnOkHLPgtCglcbQv
86qj9F7MLBsa4BwULY+kEgJ6bBo5aSPwUNgL55zYXBp3IiwUi7xMFffjXB+HzgwS7bj8HUZF5BHWR2wXIfPMB9CLbBsemlQSL7JEFK8fnBMyPYQfrufkGXANfBecmmQcWFj8BMyMpBJXMAQiKCs8Cb2Z/B5FLkwVcCvcGsBjJB2NxcwV0ajMFeHtPBmNYAwn7MxcH4yAvBbYa8wdh4F0Kwjg7BxxVNwRy/C8F5G4K/jRt9QXyR4MHi4gBCjnEbQqg170GOXAZCIIS5wTB1/MARg7DAwD2GQRndGkHM4NNBIFaFwbXRPkGosQnC6WCZv+CjAcIKmIbBCbKlQTyLQ79HwxtCSae4QNxpy8FGAArC/AzgwYuIZ0Ep4ai7ZOO8wSiTfsE+Gj+/CiuWQEJKd0Htl6VBK02IwfDy20FGxonBQDeoQRNg0sHDOEK/hqEiwa3Tr0HMZwVCayc1wZD/AEFmPSRBISyEwbwbLcB2D+PBnMwUwc7aWUFe4obBgdyIwXapVL+luonBDxVZQeSKl8AnB6XBXG28wZ1dEMF0HQnC8S5HQTs8dcGvSfDBTOTBwS+hi8HlzJHBSC1swVWJy8GkhYbBoH6jQNqAt8FRdVHByldRwfRTWUFLKUPAbi64wWKdY0HXTMTBVqA3QbgD0EHxraVBCn4YwH8IxEG1l4nBBi1kQaH7ckHFAx9Cn9vhQVSLtMHrJqhA4LXnwHGbDUKT253A4FSfwa0nQsEcsnTAJ6BywZc8+b+buJjBki0NQsxUBMFCQ4DBVN8IQscoBkKjmAdCUFNfQbHGxcETbM9AyMnLwVne28EV/tDBMuHCwOTwUcGkSb5B4RvRQZKtAcB6Jl3BIFzuv9R00sGqKUVBdfJjwRDK4UH3c6fBFhkuwHg0GEI/GhHC8WLCwb9j4EFEqL1A6kKlvg6l20GTycI/pGDFQdeeG0KcljLAU67cP75Sgz+gdgFCirfEv5GuZMET5n1B5QILQk1JC0DnYLpA5k2OwWJuzD+EsutBRbibwaNxi0EFSbHA53jOvyqGrEGoR9RBvTe5wbJqz8GBdIfB8pHwQFoJG8IW+prAglZZvzb14MEk2IjB48z7weDLhEEX9ufBGxa8wHL25kAc3ptBnRS0wfORn0C+93TBuDUxQbNpjMFUSufBH1dtQd/fij8w2UfB2LNvwWsfPMEd6OzB6czCQZ+SBEKnAQxCnErjQWzk50B1hclBHFGFwTxGisHIDRLCKlNMPgkvjsDlRb5Bo4pCvr4SF8FKR2/BMYwVwHpSEEBbA7rBQvGhQafAkcE4uKHAtlhewbRuXUHw/mlBg2rPvpmLBkLa8aZBWcKqwPtMHkIc37LBhpbowWfp1MDhkhjCLwRhQeHNRUB7QS1BcrTXQV3++8GwkeLAFJgAQdtrAsLmlytASl6DQBIIs8HYPQRCr/ARQpbcg0GjWA3C4fkSQZvLFkKUlMxBtayYwZLQFMLpcOzBWiACwqEc8MBGYXBBDmKoQePTNUBs+BvCNuHzQE4HYkG3upRB3QwSQiDjvMB41fLBuzP5QXtv7cEWzvfBdvZvwNnZVUHpRCVBcxzGwX+5FcGLqfpBodcxwOOxkr9SPhxCIQkBwQmJ7cFNw4VBqHcXQheyokCCClxB1SZLwOnlF8E8hJxBOzxxweisyEDcw2rA/MgUQr4G+0HPihDBAWLUQSLhukF42zvA9i1yQdXpBkLJoLS/3YNTv54LbUEax7xAMIpIQbkHtMABUphBeEuRwcN/Qj/YFMjBHWvvQQKg08Cpg5pB4DMQQg2pgL6nb+NAVGM2QeAXdcF6MS1B8Y4awsim2r/OC2TBCx4lQbpjncAijxlAvk8DQtLQiEGXihpCqGPKQXuduEHpG5FBhrSfwa6k6cHCQDNB0VWhwdpydEAoaSlBil7PwWbr2cBZgMrB34YXwmTeFEJDOovBchrKwapDtj7zI7zBmxjXwIPRrMFitARCirfAQZU328E5lolBS94kwD
VGRcDl2wdCASyDQV2ps0GCnODA0QmDwQ6lIkE0zLvBx3DfwWuej0EY+ZhAFb2pwRRSqsCxVGnBpOAEQqAk9sAmD2xBY1T/wVtcpkHBWodBZ3Y2wZ9kzEEEEQhAPSHQwO1EhsFPjjdB32RwQTKTHkL4cLzBfwAJwmmt98Hx2X/AqXKJwb0rD8A7hh1BWJXCQBwrzUFAP6jBJIycQQwOa8H8QMTBFIIfQmlnDsKQzeJAxFYFwskOQ0Aeo40/0U65QYhri8FqHAhC\",\"dtype\":\"float32\",\"order\":\"little\",\"shape\":[1533]},\"y\":{\"__ndarray__\":\"bt7Rwe+lR8F82rTBB0SFQSoWBcK6menBWoiLQZBW8EAQnpJAZqyLQKWxQUEEJsRAZBc6wZCKw0Gcc9ZB1JhFwVhRYEHP8cLBdhoUwCn0hMFhAEhBCO8awfUlWkEB4eLAfezAwbNQEcDmaR3BKcpYQecsrUFLkthBMSnswaDM/cGdcirBEOurvy+q8MHnWVW+VXwQQA1aVcH354fBQl7ZwXVy2UHdswjBFdeMwaLWUcHs3sHBSYeDwCRuxr5dtlRBb7W4QbePocHX6AHCeJSkQWRJvz9DvkBBPdNKwfLMgkHxg35BugkhwcRv4EFw1pFBiua/QL1wjMHRqY5BnZ5sQfn7n0ERH0/B6NoOQHC8qEHAusfBxLI2wdCu+UDxeQnC8J6PwW5mE0EEbqvBGyqKwRumMMFO/YVBOVHlwTAFesGOWP3AY1zVQcnqocCce8FAltqRwT1vuMDLc5ZBq0fOQYrAz8HY5OtBNKXKvyS/78HbhOo/nYMFQglplL5+XzNB7naQwWGt1UGKGc1BMd29we6ypkGUNQvCU91ZQfw59MBEq5fB9aSzwWN7gUHbjH6/oUp6wSebZcHE+7VBnXDRQW+OXcEj+8E/hP0fweNJtT8H+/FBV329Pm0prEF22UFBzpgwwLRt8j4vsCBBfY0QP0cNMMDy1gxC4Df0QGjFeEG65rZBtb60wM4Mi0F4zDxBv5HxwQbEREF74CDASQIMQssCr0EA18zBBoTbQb/AE8HMSNvBMDx8wYDu+T+UUflBYHmlQf/BjMGJe1BBsIHeQYf4PsEe3jpBHZTdQZ1W98GiAVRBB5hBwYZICz8rDapBtFnKQOyTMkGjlqFBjUyoQL1++EEMezlBMvMMQsMkYr+n3zRBVrPQwf/K7EF6GlpB/t1bQZwwBkLZ+ZpBZFrHwXwptkEEwrvBIfQMwTmNtz/hP0FA6/32wVrCSEHAXpzBrktAwStOUkGVBlLB5ZCawZw6AsCG44pBgyOhwOMC7sCb/ARCaHkQwm4TN8F0EMDBrDBNwd0p9MFHemQ/oMqtQPs8ikBts8zBs9AfwaDtxcHKub3AdQkGwpCp1kFu+6BByMnGwZBLr0Fx/3zB12DdQbRQjkDzxj/BFIZbwfMYhcG37eZA2hFhQc89wj9MsgzCqH/uwRb9CsENdRPC3sJmQU0tCcI37MXB+dSXwXdPMUFPHkBBQehfQeAzyT9x/kvB4HZMQfzenEGBaUjB/4AZQT2h5cCtZPXB0rNfwGBZx8H4AiZAOSGKQQss20Hvyc3BImnWQU268MGHcYRBeEnRwVSkIcEUZ9BBIVjXwc/qAkHAlcxBN+GvQdoY5cHDACC/FD8uQR85dEG9CL9BYzSTQJlAOkGPKfHAdiOdwbxF9D9FR8rB3Y1cwauC1MGhc9hBokieQNDtnkB20m3BXOMsQcIgiUH4FLZB8R1+Qbbn5UGWZttBIpPUQYU3b8H2sR3BaGXVQZhrez8FGNPBDcpCwQVCK8HqZPRA995bQNyQ1cHlUG7BVg6mQYxaFsDUM8nBrDIPQT39i8EG6Z9BGMBbwRPnsMFmiXnBetz4Qde0y0EifLzBLITVwQb75UG2KU5B/yrdwV1Slj9PnYlBs8oHQRMazj9fI4xBawCiwbXYh74mN
YlBo2mMP52TLUFg4qFBVbSYQDbH1kGnI15BUrfOQfTmUcFby1tBYuT7vzRW2EE1s8hB9pc4wUmKLcFtNA9A5cZ8wa1kgEFxGJnBYEHQwFs9lMH7MatB228awZ9NeUDX3rFBxd2VwRUnEMI8S8rBugCBweinjkA3iLLBKz55wVoDxcHXDM5BJinsQBwxB0Ky56RBIdBAQeJp+UBuz/dAAD7fQcyE1kF4qKNA/66YwTL83EFrMzxBTbrDQBK15kE1xp1BJYPrwci29cE3K/rB1ZyWQVrsnkAFmqpArfbIvxMsRcGv6pRBzTY1P1BPc0H29k9Bxh6PQTptTcFfDuJB7JTcQBG/DsLy4d3BvLyTwf7CzsGVRdNB3i69wd/kz0E1mFtBHKE8wdsKn0G2pbXA/L9WwdgNWsHciBfBu+26we4YakENK2ZBYC8Jwlo/n0GWSYnAWiDgQaLw/MGTsIfBkvApwdbLn0GEkoLBFf9kwSbpy8FJUqbBNkTIwFZiT8HO/3tBe8qrQbh6yEHdl8lAHhDxwSOZscGfKxbBLD6DQRrJgsHAaLhBP/fgQfgO3kGf6hi+fgKZP7zGj0AJcrrB0j7rwXDEXUEH9d3BKZwawVVfnEGyldxBE0bgQJAcjcFQHxZAw568wOeuaUFtIFTBlMUzwZJDn0GPyvA//smqwc+FLUGnIiDAd1gSwQ1I+b7Vda1B2OC/QZQnoUFUpA1CDERAwAlWy0H+T45Bc2VoQeNWw0GYj4FAW7PfwW52vsG0Uuw/DA6IQZpekMFEWInBb892QWmBb0GnUPTBmfuqwHGjJsHtN1lBt3GhQVBLnUFvuNY/PKi1wWTrej/TJL9BLnKvwUK1tkFGba7Be1AGwpT6tUE9C9XBcRd7QM93WkFb7G3BPkCqQWy/DcJ5sSU/zsuVQfKYwkG6R0BBGEdBwXwz/sCpL4xAFpvfQfaWC8GErbfBJPFsQarHVUH+K9NBVfE2QQkuEUJjgfY/pjSfQfV+9L/WIO5BwR0uQb2RlkGJ8dLAHdZWQVLtr8EcLAtCxqdvQc5YvkH+NfK/Ntrxwd5VqsHn7jrBMiXmQCH8mcE4y1pBJ+2HwZJAGME/q+w/H2qnQST7O8FkgxXBOn7OwBORJcF48L7BsbgMQp51FUDyzP6/ebkNQiO6B8Jhuc5BbOv0wRgtjMHgADtBa65UQSsxq8Gi96hBc8zJQJgrPsGLYUW+HQQMwuaBxkHV/F1B6ASKQV0EHcEuh0HB9JiNQTJavsGMJee/u+ccwZDQy0D9abjBjIwTwlFx10G4m0NAoz2awL95tkGHSPI/TKCrwdu4WUG5vkrBQg1hQFDrOEGdZwvB6P+Cwcs35MEHNwrAMNfXQfNKgcF2RfDB7iX1QQ1NWEFlBDlBQ4MuwQjOBcBiVwvCQsjIwQj2BEG5pcNB/2ykQTWNpMEFwH3BmRKNP26y2kHxBVG9CLadwdWPAMJSM/vA6MCdwS3jC8K5C8jBetWjwZqZOkDHQSpBo6yMQc2W/MF71Zc/zUUWQcXB70A3zdbA+P4bwUU08sFTlKM7EUeLwY5Y2EE0ig1B8giBQIt5JEBJhLrBxchHQa55sMGq8L5BGqn+QM8peUEtHtHBcgcTwpHbeL9fljbBXksKQSuAHMHvGddB2h+5QW303kA1glLBEIstwYf9z0FfM29B5qvlwaYKSEHxcHlBylegQbxBacEwO8lAoru+QSRczkGBMSdAGMWDwX0m68EIBN1BUI/IPrX458DVMNjBvtX5wFnAiMFG28ZB4ImsQR1aoMH8wqzBzMZWQV0h3r4aPZNAIkKSwUkOsUBFjjDBlznCwYeAPUEBsRPBCQFKQd7nM8F6Q3DBfxZuwW+fy0EcarxBXhJpwapfWEFggcFB2ejCweD028E+9A9BVw7OQCfMF0Cu4/FAUPOrwTG+isF7dXBBuF+pQUqo8MEer+XBdh7tvwXS9kEwC4rB2wq0wAZ4VsEQpf7BV0HXwVpTTUFET
L1AgNgGPqL088EYePrBLn3ZQNfLusERq0PBeabxQSFWTkFqg4lBvGzVQMUP+MEGMalBg9mmQGaasEG7zBVBcdhkQe440EFOvcdBkgZNQWu6gcHN7dtBYbPSQPx9zsHr3yVBAN6xwK+LBUDVVDzBuqIbQBGjN0HJQKxBhZJWQfxbMsFmUqRB+xWbQYsHiMFwR4XBxjSnvrXW6UFlsKK/nK4cP0p0jD+A2Ek+LRNmQZdtS0HqoBbBOx3FwaQHocFw+7G/dxenP+zvdUEtFUtAt2M+PmbvlEHd6EjB2KYyQXN+PEGKZ4rBRPioQZLcs8HvprBBxW99QCOSe0H3/IfBqqRKwSfIqUGzAbDBNzXRQc3WL0FyKhHC5HbTwSuT20HxnszBeMLLQQZVzj/IAUfBY4M6wSTYSME7tIlBF54wwTRgoECDv19BrtJfQUu/okAoT8hBBJC1wfQBosHyFQnBCLOzQXek0MGeBTHB6xPLwEeKZEEzB4pA1cdYQSZibkHBsBDBi0G8wQJfr0E7CsZBE7GEwbfKXL9/IvLBgkVOQRBs20FJ5QRA7sZPQZuSoUHAiaZBIVaJwHCPuUHfWBDCWSNlQZDbokEhEFjBuCvgwSItpUEC/I3BYvwAwNuj9kHBUg7CHujNQcB/fEEByifB0m48wAUABMIuqrTBI9aPwXFstD8XA50/LAoTwbjKnT/oqbTBEqgZQJ/228C18CtBY7xnP622YUDcEgPCqbFtQeNGN8FSO0bBLte6QUre6cH0WzdBNT+KQYlSiUATQkDBlV30we16xr+RHLrB3/GkwcM4S8HA89XBtgR5QVpWhUE5xQvBMxizQeSHK0FXN5q/4sGEQaUPrECCSoVBk2LWQOGX68FgBr5B2aobwSYRxMFBlvXBFmZhQcpXnEC1lb3BjBoqwZZpNcG+2J5AqiEzQDeXvcALvi7BLvg2QRKNHT/XcTpAFvYVwXgAlEBmHSFB/tQSQWjMRcGpHtC+ZCcMQYYDbcDVxd9AGm2sQV6KzkFBT4HBYuAwQavK3ECqn8TBOWo5Qf18HUBZfbvBmiZ2QCzG4kBkBuHBwKzwwWvaZUGSHQzCFDCDwUfIpkHcCK3AubzIPuPZrj9YJQxC0zKtQUJKRsGBq/nBzfnRQOkOQ0G7iP9AwwdWQVxBj0H59cXB0rogwRGLAsFo7ntBfQQAwVYhMcHr1bE/cuPMwf9MIUFAmbvBeQy1wZPP8sGyFmVBEga5wZGi3cApQXlB+Oe+wR9m40EeOshBvzFrP6Z7Y8Eo9sZBIbZywD72DUCDJeJBPGkNQsb3r0GcHXlBlSDzQHu/10EsjRBA82VeQFWkM8F8rds/QuuYQUDG2kHejC7AAYDaQMlOUUFOdhtBIupQQWK/HcFjie3BL2oFwmV1rEFKYopBcDarwftFLMG+wfPBgU3OwWGU9cHyI53BJe0WwUNIs0BvyJ9A3NKiQbu1FUCgTKHBiGLQwPgST0FZYYdBx+EowTFYUUHhr21BRousQZf4zMHcdQdBKe6kQQokOMEinw7BCte1QBglLMG/bG5BaLj4wWDHD8IzYRPC7hh4Pzi1t0HljFvBQnF0wc+zBMKckoLB1O0LwS/duMHWfnpBUN4FwcXHQ0G88SBBvfuDwZ8yYkFPkLjBhTMkwelqccDV5LfA+jCXQAbcEsEGRnrBJ76YwfL42kFFm9PBZxE8QUWY0sEX40vBea2nQT+JUcE5LsnBd0q7wXqgl0FhqRtAJz0QQKSEh8Hf0s/AwAxhwSuORMErZso/i+kNQYXkscE056VBhq5RwdFBn0FkBqVBwOlPQSJiycFOSEc+Z1Umwd5S2cF8ZThBlrT3QQ7cucHAjz9BNd4dwanoS0FpuxJB7iniwWKkb78LrqDBGFOXwWRUfsEmO7nAbb22wfpvUEA13/zB7NgBwAtUt8GINAjB767CwZBHcMH4bS7BxFYBwsNj2EHC8dvBkxOXwauZtMEDV0ZBWy/SQeJcJ0HIC
GTBfmmnQfRTkUHdsWJBkS7qvuAP4z/uRdhAeqiiQTkNyUEFbW/AL9PRQLzeA0K5LJpA8rxrQUZ3xcEdsZ1BW+XswXs5f8GOEuG/DPQOwZHUwsBzV4xAGnRBQWJW48Ft2J9A5AjfQX9og79jc3/BaGkNwo4bHcHOnVBBIQSCwV/t3EBCHBPBGvm4weeiykBO5LHAViacP6GHjUEokXDBfhQwwU4lkUGoOKBAH5Q8vqqCzUDhYcvB6467QSblyMH5S4bBxmEwQXrAacG1FCRB8MehQRGRGj5DNs3BTESlQSq6x7/3AQFBo7XPQacN78HeMoVBynjBwdu7FUCPDsrAwBtUQSUl1UHxUXFBBfxTPwYXi0H8bb3A4GnCP+LM5kElsRlA8yZSwbzEIMFbOz7ApWuOPyoN0MDMpAs/AUdfQXDXiMFmfMzBtUCGwdRHosENiPTBuvfYQCtC2cFd3fnBwNvfwRaLNEFvccDBr5xqQYT2+cEc3n5A++I5wSltUECy3nxBWfVjQZXNjEHCFOFBUf+MwTuGmkG8d7tAlPM3wZemwz7OsbQ/QdwTwlt3rMBIxKg/3lWjQcYI7sD1j1tBm73lQXIwEEIcsU9Bf/0dwfemmz+59LE/Ie/wP5o4dkFHcgNCyxCrweBf3MFprQK/nNakQeFcwMG07A1BsQgqQaLLlUEvYb/BNl1kQFnh7MHjUfDBxJNHwbaBskFKEkbBYdkHwdRQrcGAmeJBIQUQwl5bb0FtorvBZmiiQZgk+EGOLgbAA8vhQM13xEBhMLdBNZGrwPQvtcBc7UnB+LYpQX+69MCBWr/AQ+PTQW1/zMFaRdbBLiKIwXOS0MHwF4rBqCJ8QJBancECFI/BmdtQQbm0s0Cq4ZfB5kbMQTkf+0HexNlBjwwmQcEq5kH9A+3BcqyLQbrEKUGJcwE+RFidwWDqJkFEUw/CYe6/waiPl0H4iUZBc+UwPzOqksF7mafBgpKCvlVGz0By1uvBekJiwRFoysG+6mXBKEmFwbMGE0Lu279A8KlqQYy2mMCocrvB9+zdQTK2DcHcrPjABn7VQa8pkUBxUNxBn7udwdB29sHvFipA5HeSwVBmzEExq89Ac+VfwPo9JkAox5S/71DbwT76QEF07rnBP31WQf7cwT+OPpFBMfxYQQHgqEGqvwjBQv8xQfpUmkGKS4hBabJMwZCVGL82iOpA2jQ9wJyaP0FteZ7BuOHrwfJR1L/4HsJB82xHQSxxAcKEKvpBZ7nZQdh6H0Fu8o9B3HvvQPR55MEFvnrBaXytwQC0BUL8enrB5o4KQhZlS8GnUwc/8ekkQRjoGEDOEDXBcl2iv3NXMcEsTFJB8JspwXBQcUHYOgVBfv3PwTcIrsFB59RAupO7QMrvN0HqtzXB51UrQasBT0FBWA7CnChJQYCD2cHKpJ5APlLsQczL0b9kM5nBfTMUwtOVdcEWcyzBZ908QYRQgMEGcoVBLLpNQdl7ksF+onrB1s2nQTpcA8IjhCHBzBLtwWCZnj9CtT5BZt4IQi6EA8KZwFTAdhQ6wTR4IUFF8NDBqjE5QUogtsGhivfBMsuhv2fsQUGZdLzB7ItJQFxtKUETNkRB9MtmwW/TPUFlo4BBW72vP+4Qf8EvkqTBQgVKQeLSSEEqmsPBy0rlv5hjtsFg7cVB6pMfQEzP4sC6tTfBw5cdwaF23sFehrdBBMIPQRdT9kHaYzvBtrLIOx0wskHDblfAghnJwVJk58BaAs3Bc+nSwKZhpEGXEjtBXL1mwR0To0Hi56rBoimzwRuC5cBLwnNBa1kVQJPstECqFN/BAVDIQcwizcA9FBzBl6vEQfoFY0HW0Uc/J9evP8UXDsJcihrBdf/gQcKmwUGt3jdBeL4OwujnsMEIjfXAaPWeQbMrGcFmdKXAa/QMQiIC/MHZUrlBJvq3wRbA1EGebTe/MRlEP67+PUGEOgTCQL9jwaZpp0CtYDLB5xkCwTK+g8HIGAJCXhrbPf4A6kFvp
UVBsXLHwfjEkz5m5t8/tYmOwYlk4r+lScVBI9R3QZiQCz8Ttt9BefRhQUjLBkEN2p1B+IcpP9ZH58G6/T1ByL+wwbe4X0BoxI1A6KDOwSt3B8KL8E3BsVMSwZxEssF/1bNBGtxdwByeskFsQzjB+NCRwbeVrsGen8VBB3G0wTldA0DiT9TBFqvywQEkB8JV789APOqjwcKSAr+PxYZB45hZQRVcG8Fe8FZBYNYFwkSzvsHDNcxBSHqoQRhv7MEeQtJBOCTeQUuH4EE5exFBRT42wWC7skFZ3b5ANJ0QQb7j10HFJ4HBhMZ2QQj2MMEms2LB1ZlpwRKassDJpOPBNpFVwW9wO8Hjdo9BZ1bBwI4JU0Fzq5lBZVbYQd9Uvr+YuN9A3QTpwBog18F3EsfB+mFLwaPQ7kAB18NAxkPBwT684MCwtl5Bf45WwHrwd0FATolBJocuwb0LjMH2CWxBXQzmQbq0isEVPrjAxIUUQRcvQMGpsEZBTkrSP2qNQ8EIMlbBBYZUQX+ansGMEbNB\",\"dtype\":\"float32\",\"order\":\"little\",\"shape\":[1533]}},\"selected\":{\"id\":\"1168\"},\"selection_policy\":{\"id\":\"1167\"}},\"id\":\"1143\",\"type\":\"ColumnDataSource\"},{\"attributes\":{},\"id\":\"1111\",\"type\":\"DataRange1d\"},{\"attributes\":{\"coordinates\":null,\"data_source\":{\"id\":\"1143\"},\"glyph\":{\"id\":\"1145\"},\"group\":null,\"hover_glyph\":null,\"muted_glyph\":{\"id\":\"1147\"},\"nonselection_glyph\":{\"id\":\"1146\"},\"view\":{\"id\":\"1149\"}},\"id\":\"1148\",\"type\":\"GlyphRenderer\"},{\"attributes\":{\"tools\":[{\"id\":\"1127\"},{\"id\":\"1128\"},{\"id\":\"1129\"},{\"id\":\"1130\"},{\"id\":\"1131\"},{\"id\":\"1132\"},{\"id\":\"1134\"}]},\"id\":\"1135\",\"type\":\"Toolbar\"},{\"attributes\":{\"fill_color\":{\"field\":\"label\",\"transform\":{\"id\":\"1106\"}},\"line_color\":{\"field\":\"label\",\"transform\":{\"id\":\"1106\"}},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1145\",\"type\":\"Scatter\"},{\"attributes\":{},\"id\":\"1131\",\"type\":\"ResetTool\"},{\"attributes\":{},\"id\":\"1120\",\"type\":\"BasicTicker\"},{\"attributes\":{\"high\":100,\"low\":0,\"palette\":[\"#30123b\",\"#311542\",\"#32184a\",\"#341b51\",\"#351e58\",\"#36215f\",\"#372365\",\"#38266c\",\"#392972\",\"#3a2c79\",\"#3b2f7f\",\"#3c3285\",\"#3c358b\",\"#3d3791\",\"#3e3a96\",\"#3f3d9c\",\"#4040a1\",\"#4043a6\",\"#4145ab\",\"#4148b0\",\"#424bb5\",\"#434eba\",\"#4350be\",\"#4353c2\",\"#4456c7\",\"#4458cb
\",\"#455bce\",\"#455ed2\",\"#4560d6\",\"#4563d9\",\"#4666dd\",\"#4668e0\",\"#466be3\",\"#466de6\",\"#4670e8\",\"#4673eb\",\"#4675ed\",\"#4678f0\",\"#467af2\",\"#467df4\",\"#467ff6\",\"#4682f8\",\"#4584f9\",\"#4587fb\",\"#4589fc\",\"#448cfd\",\"#438efd\",\"#4291fe\",\"#4193fe\",\"#4096fe\",\"#3f98fe\",\"#3e9bfe\",\"#3c9dfd\",\"#3ba0fc\",\"#39a2fc\",\"#38a5fb\",\"#36a8f9\",\"#34aaf8\",\"#33acf6\",\"#31aff5\",\"#2fb1f3\",\"#2db4f1\",\"#2bb6ef\",\"#2ab9ed\",\"#28bbeb\",\"#26bde9\",\"#25c0e6\",\"#23c2e4\",\"#21c4e1\",\"#20c6df\",\"#1ec9dc\",\"#1dcbda\",\"#1ccdd7\",\"#1bcfd4\",\"#1ad1d2\",\"#19d3cf\",\"#18d5cc\",\"#18d7ca\",\"#17d9c7\",\"#17dac4\",\"#17dcc2\",\"#17debf\",\"#18e0bd\",\"#18e1ba\",\"#19e3b8\",\"#1ae4b6\",\"#1be5b4\",\"#1de7b1\",\"#1ee8af\",\"#20e9ac\",\"#22eba9\",\"#24eca6\",\"#27eda3\",\"#29eea0\",\"#2cef9d\",\"#2ff09a\",\"#32f197\",\"#35f394\",\"#38f491\",\"#3bf48d\",\"#3ff58a\",\"#42f687\",\"#46f783\",\"#4af880\",\"#4df97c\",\"#51f979\",\"#55fa76\",\"#59fb72\",\"#5dfb6f\",\"#61fc6c\",\"#65fc68\",\"#69fd65\",\"#6dfd62\",\"#71fd5f\",\"#74fe5c\",\"#78fe59\",\"#7cfe56\",\"#80fe53\",\"#84fe50\",\"#87fe4d\",\"#8bfe4b\",\"#8efe48\",\"#92fe46\",\"#95fe44\",\"#98fe42\",\"#9bfd40\",\"#9efd3e\",\"#a1fc3d\",\"#a4fc3b\",\"#a6fb3a\",\"#a9fb39\",\"#acfa37\",\"#aef937\",\"#b1f836\",\"#b3f835\",\"#b6f735\",\"#b9f534\",\"#bbf434\",\"#bef334\",\"#c0f233\",\"#c3f133\",\"#c5ef33\",\"#c8ee33\",\"#caed33\",\"#cdeb34\",\"#cfea34\",\"#d1e834\",\"#d4e735\",\"#d6e535\",\"#d8e335\",\"#dae236\",\"#dde036\",\"#dfde36\",\"#e1dc37\",\"#e3da37\",\"#e5d838\",\"#e7d738\",\"#e8d538\",\"#ead339\",\"#ecd139\",\"#edcf39\",\"#efcd39\",\"#f0cb3a\",\"#f2c83a\",\"#f3c63a\",\"#f4c43a\",\"#f6c23a\",\"#f7c039\",\"#f8be39\",\"#f9bc39\",\"#f9ba38\",\"#fab737\",\"#fbb537\",\"#fbb336\",\"#fcb035\",\"#fcae34\",\"#fdab33\",\"#fda932\",\"#fda631\",\"#fda330\",\"#fea12f\",\"#fe9e2e\",\"#fe9b2d\",\"#fe982c\",\"#fd952b\",\"#fd9229\",\"#fd8f28\",\"#fd8c27\",\"#fc8926\",\"#fc8624\",\"#fb8323\",\"#fb8022\",\"#fa
7d20\",\"#fa7a1f\",\"#f9771e\",\"#f8741c\",\"#f7711b\",\"#f76e1a\",\"#f66b18\",\"#f56817\",\"#f46516\",\"#f36315\",\"#f26014\",\"#f15d13\",\"#ef5a11\",\"#ee5810\",\"#ed550f\",\"#ec520e\",\"#ea500d\",\"#e94d0d\",\"#e84b0c\",\"#e6490b\",\"#e5460a\",\"#e3440a\",\"#e24209\",\"#e04008\",\"#de3e08\",\"#dd3c07\",\"#db3a07\",\"#d93806\",\"#d73606\",\"#d63405\",\"#d43205\",\"#d23005\",\"#d02f04\",\"#ce2d04\",\"#cb2b03\",\"#c92903\",\"#c72803\",\"#c52602\",\"#c32402\",\"#c02302\",\"#be2102\",\"#bb1f01\",\"#b91e01\",\"#b61c01\",\"#b41b01\",\"#b11901\",\"#ae1801\",\"#ac1601\",\"#a91501\",\"#a61401\",\"#a31201\",\"#a01101\",\"#9d1001\",\"#9a0e01\",\"#970d01\",\"#940c01\",\"#910b01\",\"#8e0a01\",\"#8b0901\",\"#870801\",\"#840701\",\"#810602\",\"#7d0502\",\"#7a0402\"]},\"id\":\"1106\",\"type\":\"LinearColorMapper\"},{\"attributes\":{\"axis\":{\"id\":\"1119\"},\"coordinates\":null,\"group\":null,\"ticker\":null},\"id\":\"1122\",\"type\":\"Grid\"},{\"attributes\":{\"axis\":{\"id\":\"1123\"},\"coordinates\":null,\"dimension\":1,\"group\":null,\"ticker\":null},\"id\":\"1126\",\"type\":\"Grid\"},{\"attributes\":{},\"id\":\"1168\",\"type\":\"Selection\"},{\"attributes\":{},\"id\":\"1132\",\"type\":\"HelpTool\"},{\"attributes\":{\"callback\":null,\"tooltips\":\"\\n
\\n \\n
\\n @label_desc - @split\\n [#@video_id]\\n
\\n
\\n \\n\"},\"id\":\"1134\",\"type\":\"HoverTool\"},{\"attributes\":{},\"id\":\"1113\",\"type\":\"DataRange1d\"},{\"attributes\":{\"bottom_units\":\"screen\",\"coordinates\":null,\"fill_alpha\":0.5,\"fill_color\":\"lightgrey\",\"group\":null,\"left_units\":\"screen\",\"level\":\"overlay\",\"line_alpha\":1.0,\"line_color\":\"black\",\"line_dash\":[4,4],\"line_width\":2,\"right_units\":\"screen\",\"syncable\":false,\"top_units\":\"screen\"},\"id\":\"1133\",\"type\":\"BoxAnnotation\"},{\"attributes\":{\"coordinates\":null,\"formatter\":{\"id\":\"1162\"},\"group\":null,\"major_label_policy\":{\"id\":\"1163\"},\"ticker\":{\"id\":\"1124\"}},\"id\":\"1123\",\"type\":\"LinearAxis\"},{\"attributes\":{\"overlay\":{\"id\":\"1133\"}},\"id\":\"1129\",\"type\":\"BoxZoomTool\"},{\"attributes\":{\"fill_alpha\":{\"value\":0.1},\"fill_color\":{\"field\":\"label\",\"transform\":{\"id\":\"1106\"}},\"hatch_alpha\":{\"value\":0.1},\"line_alpha\":{\"value\":0.1},\"line_color\":{\"field\":\"label\",\"transform\":{\"id\":\"1106\"}},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1146\",\"type\":\"Scatter\"},{\"attributes\":{\"coordinates\":null,\"group\":null,\"text\":\"Check label by hovering mouse over the 
dots\"},\"id\":\"1109\",\"type\":\"Title\"},{\"attributes\":{},\"id\":\"1167\",\"type\":\"UnionRenderers\"},{\"attributes\":{\"coordinates\":null,\"formatter\":{\"id\":\"1165\"},\"group\":null,\"major_label_policy\":{\"id\":\"1166\"},\"ticker\":{\"id\":\"1120\"}},\"id\":\"1119\",\"type\":\"LinearAxis\"},{\"attributes\":{},\"id\":\"1130\",\"type\":\"SaveTool\"},{\"attributes\":{},\"id\":\"1117\",\"type\":\"LinearScale\"},{\"attributes\":{},\"id\":\"1162\",\"type\":\"BasicTickFormatter\"},{\"attributes\":{},\"id\":\"1115\",\"type\":\"LinearScale\"},{\"attributes\":{\"source\":{\"id\":\"1143\"}},\"id\":\"1149\",\"type\":\"CDSView\"},{\"attributes\":{},\"id\":\"1127\",\"type\":\"PanTool\"},{\"attributes\":{},\"id\":\"1166\",\"type\":\"AllLabels\"},{\"attributes\":{\"fill_alpha\":{\"value\":0.2},\"fill_color\":{\"field\":\"label\",\"transform\":{\"id\":\"1106\"}},\"hatch_alpha\":{\"value\":0.2},\"line_alpha\":{\"value\":0.2},\"line_color\":{\"field\":\"label\",\"transform\":{\"id\":\"1106\"}},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1147\",\"type\":\"Scatter\"}],\"root_ids\":[\"1108\"]},\"title\":\"Bokeh Application\",\"version\":\"2.4.3\"}};\n const render_items = [{\"docid\":\"458dc79d-472d-4f36-92df-cc9c2d7d3fb7\",\"root_ids\":[\"1108\"],\"roots\":{\"1108\":\"74ea627a-4164-4f24-bbf8-b9b7c4c5836c\"}}];\n root.Bokeh.embed.embed_items_notebook(docs_json, render_items);\n }\n if (root.Bokeh !== undefined) {\n embed_document(root);\n } else {\n let attempts = 0;\n const timer = setInterval(function(root) {\n if (root.Bokeh !== undefined) {\n clearInterval(timer);\n embed_document(root);\n } else {\n attempts++;\n if (attempts > 100) {\n clearInterval(timer);\n console.log(\"Bokeh: ERROR: Unable to run BokehJS code because BokehJS library is missing\");\n }\n }\n }, 10, root)\n }\n})(window);", + "application/vnd.bokehjs_exec.v0+json": "" + }, + "metadata": { + "application/vnd.bokehjs_exec.v0+json": { + "id": "1108" + } + }, + 
from argparse import ArgumentParser
from preprocessing.create_wlasl_landmarks_dataset import parse_create_args, create
from preprocessing.extract_mediapipe_landmarks import parse_extract_args, extract


if __name__ == '__main__':
    # CLI dispatcher: `python preprocessing.py {create,extract} ...`
    # Each sub-command registers its own options on its own sub-parser.
    main_parser = ArgumentParser()
    subparser = main_parser.add_subparsers(dest="action")
    create_subparser = subparser.add_parser("create")
    extract_subparser = subparser.add_parser("extract")
    parse_create_args(create_subparser)
    parse_extract_args(extract_subparser)

    args = main_parser.parse_args()

    if args.action == "create":
        create(args)
    elif args.action == "extract":
        extract(args)
    else:
        # BUG FIX: the original built the ValueError but never raised it, so
        # running without a valid sub-command exited silently with status 0.
        raise ValueError("action command must be either 'create' or 'extract'")
def convert_to_str(arr, precision=6):
    """Serialize a value for CSV storage.

    numpy arrays become a bracketed, comma-separated string of fixed-point
    numbers (exact zeros are written as the shorthand '0'); any other value
    is passed through str().
    """
    if not isinstance(arr, np.ndarray):
        return str(arr)
    # Zeros get no decimals, keeping the generated CSV files compact.
    rendered = ('0' if value == 0 else f'{value:.{precision}f}' for value in arr)
    return '[' + ','.join(rendered) + ']'


def parse_create_args(parser):
    """Register the CLI options of the `create` sub-command on *parser*."""
    parser.add_argument(
        '--landmarks-dataset', '-lmks', required=True,
        help='Path to folder with landmarks npy files. '
             'You need to run `extract_mediapipe_landmarks.py` script first')
    parser.add_argument(
        '--dataset-folder', '-df', default='data/wlasl',
        help='Path to folder where original `WLASL_v0.3.json` and `id_to_label.json` are stored. '
             'Note that final CSV files will be saved in this folder too.')
    parser.add_argument(
        '--videos-folder', '-videos', default=None,
        help='Path to folder with videos. If None, then no information of videos (fps, length, '
             'width and height) will be stored in final csv file')
    parser.add_argument(
        '--num-classes', '-nc', default=100, type=int,
        help='Number of classes to use in WLASL dataset')
    parser.add_argument('--create-new-split', action='store_true')
    parser.add_argument(
        '--test-size', '-ts', default=0.25, type=float,
        help='Test split percentage size. Only required if --create-new-split is set')
Skipping') + continue + lmk = np.load(lmk_fn).T + lmks_dict = {'video_id': video_id} + for lmk_, name in zip(lmk, lmks_names): + lmks_dict[name] = lmk_ + lmks_data.append(lmks_dict) + + df_lmks = pd.DataFrame(lmks_data) + print(df_lmks) + df = pd.merge(df_video, df_lmks) + print(df) + aux_columns = ['split', 'video_id', 'labels', 'label_name'] + if videos_folder is not None: + aux_columns += ['video_width', 'video_height', 'fps', 'length'] + df_aux = df[aux_columns] + df = map_blazepose_df(df) + df = pd.concat([df, df_aux], axis=1) + if args.create_new_split: + df_train, df_test = train_test_split(df, test_size=test_size, stratify=df['labels'], random_state=42) + else: + print(df['split'].unique()) + df_train = df[(df['split'] == 'train') | (df['split'] == 'val')] + df_test = df[df['split'] == 'test'] + + print(f'Num classes: {num_classes}') + print(df_train['labels'].value_counts()) + assert set(df_train['labels'].unique()) == set(df_test['labels'].unique( + )), 'The labels for train and test dataframe are different. We recommend to download the dataset again, or to use \ + the --create-new-split flag' + for split, df_split in zip(['train', 'val'], + [df_train, df_test]): + fn_out = op.join(dataset_folder, f'WLASL{num_classes}_{split}.csv') + (df_split.reset_index(drop=True) + .applymap(convert_to_str) + .to_csv(fn_out, index=False)) diff --git a/preprocessing/extract_mediapipe_landmarks.py b/preprocessing/extract_mediapipe_landmarks.py new file mode 100644 index 0000000..6d63076 --- /dev/null +++ b/preprocessing/extract_mediapipe_landmarks.py @@ -0,0 +1,154 @@ +import os +import os.path as op +from itertools import chain +from collections import namedtuple +import glob + +import cv2 +import numpy as np +import mediapipe as mp +from tqdm.auto import tqdm + +# Import drawing_utils and drawing_styles. 
class LandmarksResults:
    """
    Wrapper around a MediaPipe Holistic result.

    Any component (pose / left hand / right hand) that was not detected is
    replaced by a list of zero-filled ``Landmark`` placeholders so callers can
    always iterate a fixed number of points.
    """

    def __init__(
        self,
        results,
        num_landmarks_pose=LEN_LANDMARKS_POSE,
        num_landmarks_hand=LEN_LANDMARKS_HAND,
    ):
        self.results = results
        self.num_landmarks_pose = num_landmarks_pose
        self.num_landmarks_hand = num_landmarks_hand

    @staticmethod
    def _landmarks_or_zeros(component, count):
        # Fall back to `count` dummy (0, 0) landmarks when detection failed.
        if component is None:
            return [Landmark(0, 0)] * count
        return component.landmark

    @property
    def pose_landmarks(self):
        return self._landmarks_or_zeros(self.results.pose_landmarks, self.num_landmarks_pose)

    @property
    def left_hand_landmarks(self):
        return self._landmarks_or_zeros(self.results.left_hand_landmarks, self.num_landmarks_hand)

    @property
    def right_hand_landmarks(self):
        return self._landmarks_or_zeros(self.results.right_hand_landmarks, self.num_landmarks_hand)
def get_landmarks(image_orig, holistic, debug=False):
    """
    Runs landmarks detection for a single BGR image.

    :param image_orig: BGR frame as read by OpenCV.
    :param holistic: an active ``mp_holistic.Holistic`` instance.
    :param bool debug: re-validate each component separately so a failing
        assert points at the component (pose / left hand / right hand).
    :return list: flat [x0, y0, x1, y1, ...] of length ``TOTAL_LANDMARKS * 2``.
    """
    # Convert the BGR image to RGB before processing.
    image = cv2.cvtColor(image_orig, cv2.COLOR_BGR2RGB)
    results = LandmarksResults(holistic.process(image))
    if debug:
        lmks_pose = []
        for lmk in results.pose_landmarks:
            lmks_pose.append(lmk.x)
            lmks_pose.append(lmk.y)
        # Fix: two coordinates are appended per landmark, so the expected
        # length is 2 * LEN_LANDMARKS_POSE (the previous assert always failed).
        assert (
            len(lmks_pose) == 2 * LEN_LANDMARKS_POSE
        ), f"{len(lmks_pose)} != {2 * LEN_LANDMARKS_POSE}"

        lmks_left_hand = []
        for lmk in results.left_hand_landmarks:
            lmks_left_hand.append(lmk.x)
            lmks_left_hand.append(lmk.y)
        assert (
            len(lmks_left_hand) == 2 * LEN_LANDMARKS_HAND
        ), f"{len(lmks_left_hand)} != {2 * LEN_LANDMARKS_HAND}"

        lmks_right_hand = []
        for lmk in results.right_hand_landmarks:
            lmks_right_hand.append(lmk.x)
            lmks_right_hand.append(lmk.y)  # fix: removed stray trailing comma
        assert (
            len(lmks_right_hand) == 2 * LEN_LANDMARKS_HAND
        ), f"{len(lmks_right_hand)} != {2 * LEN_LANDMARKS_HAND}"

    landmarks = []
    for lmk in chain(
        results.pose_landmarks,
        results.left_hand_landmarks,
        results.right_hand_landmarks,
    ):
        landmarks.append(lmk.x)
        landmarks.append(lmk.y)
    assert (
        len(landmarks) == TOTAL_LANDMARKS * 2
    ), f"{len(landmarks)} != {TOTAL_LANDMARKS * 2}"
    return landmarks
# python3 preprocessing.py -videos=data/wlasl/videos_25fps/ -lmks=data/landmarks
def extract(args):
    """Extract MediaPipe Holistic landmarks for every ``*.mp4`` under
    ``args.videos_folder`` and save one ``<video_id>.npy`` per video
    (shape: frames x coordinates) into ``args.output_landmarks``.
    """
    landmarks_output = args.output_landmarks
    videos_folder = args.videos_folder
    os.makedirs(landmarks_output, exist_ok=True)
    for fn_video in tqdm(sorted(glob.glob(op.join(videos_folder, "*mp4")))):
        cap = cv2.VideoCapture(fn_video)
        ret, image_orig = cap.read()
        if not ret:
            # Fix: an unreadable/empty video previously crashed with
            # AttributeError when the first frame (None) was used.
            print(f"Could not read first frame of {fn_video}, skipping")
            continue
        landmarks_video = []
        with tqdm(total=int(cap.get(cv2.CAP_PROP_FRAME_COUNT))) as pbar:
            with mp_holistic.Holistic(
                static_image_mode=False,
                min_detection_confidence=0.5,
                model_complexity=2,
            ) as holistic:
                while ret:
                    try:
                        landmarks = get_landmarks(image_orig, holistic)
                    except Exception as e:
                        print(e)
                        # Re-run with debug assertions enabled so the failing
                        # component is identified before the assert fires.
                        landmarks = get_landmarks(image_orig, holistic, debug=True)
                    ret, image_orig = cap.read()
                    landmarks_video.append(landmarks)
                    pbar.update(1)
        landmarks_video = np.vstack(landmarks_video)
        np.save(
            op.join(landmarks_output, op.basename(fn_video).split(".")[0]),
            landmarks_video,
        )
    def test_full_groups(self):
        # A merge must be refused when the combined size would exceed the
        # mini-batch size (30 + 3 > 32 here).
        group0 = self.grouper.create_or_get_group()
        for i in range(30):
            self.grouper.assign_group(i, group0)

        # 30 of 32 slots used: the group is not technically full yet.
        self.assertFalse(self.grouper.group_is_full(group0))
        initial_group_len = len(self.grouper.groups[group0])

        group1 = self.grouper.create_or_get_group()
        for i in range(30, 33):
            self.grouper.assign_group(i, group1)

        self.grouper.merge_groups(group0, group1)
        # Assert no merge done
        self.assertEqual(len(self.grouper.groups[group0]), initial_group_len)
        self.assertTrue(group1 in self.grouper.groups)
        self.assertEqual(self.grouper.item_to_group[31], group1)
        self.assertEqual(self.grouper.item_to_group[32], group1)

    def test_replace_groups(self):
        # After merging group1 into group0, the now-empty slot is recycled:
        # the *last* group's contents (group2) are moved into group1's id.
        group0 = self.grouper.create_or_get_group()
        for i in range(20):
            self.grouper.assign_group(i, group0)

        group1 = self.grouper.create_or_get_group()
        for i in range(20, 23):
            self.grouper.assign_group(i, group1)

        group2 = self.grouper.create_or_get_group()
        for i in range(23, 30):
            self.grouper.assign_group(i, group2)

        self.grouper.merge_groups(group1, group0)
        # 20 + 3 items merged into group0.
        self.assertEqual(len(self.grouper.groups[group0]), 23)
        # group1's id now holds what used to be group2 (7 items);
        # group2's id was deleted by the replacement.
        self.assertTrue(group1 in self.grouper.groups)
        self.assertFalse(group2 in self.grouper.groups)
        self.assertEqual(len(self.grouper.groups[group1]), 7)
class ClearMLTracker(Tracker):
    """Experiment tracker backed by ClearML."""

    def __init__(self, project_name=None, experiment_name=None):
        # Reuse an already-running task (e.g. when launched by a ClearML agent)
        # before creating a fresh one.
        existing_task = Task.current_task()
        self.task = existing_task if existing_task else Task.init(project_name=project_name,
                                                                  task_name=experiment_name)

    def execute_remotely(self, queue_name):
        # Re-enqueue this run on a remote ClearML worker queue.
        self.task.execute_remotely(queue_name=queue_name)

    def log_scalar_metric(self, metric, series, iteration, value):
        current = Logger.current_logger()
        current.report_scalar(metric, series, iteration=iteration, value=value)

    def log_chart(self, title, series, iteration, figure):
        current = Logger.current_logger()
        current.report_plotly(title=title, series=series, iteration=iteration, figure=figure)

    def finish_run(self):
        # Mark the task completed before releasing its resources.
        self.task.mark_completed()
        self.task.close()
class Tracker:
    """No-op experiment tracker.

    Serves as the base interface; concrete backends (e.g. ClearMLTracker)
    override the hooks they support. Every method intentionally does nothing
    so training code can call them unconditionally.
    """

    def __init__(self, project_name, experiment_name):
        super().__init__()

    def execute_remotely(self, queue_name):
        pass

    def track_config(self, configs):
        # Used to track configuration parameters of an experiment run
        pass

    def track_artifacts(self, filepath):
        # Used to track artifacts like model weights
        pass

    def log_scalar_metric(self, metric, series, iteration, value):
        pass

    def log_chart(self, title, series, iteration, figure):
        pass

    def finish_run(self):
        pass

    def get_callback(self):
        pass
def train(args, tracker: Tracker):
    """Run the full training loop for either the classification (SPOTER) or the
    embedding (SPOTER_EMBEDDINGS) model, logging metrics through `tracker` and
    checkpointing the best model on validation.

    :param args: parsed namespace from :func:`get_default_args`.
    :param Tracker tracker: experiment tracker (no-op or ClearML).
    :raises ValueError: on an unknown ``--optimizer`` or ``--hard_triplet_mining`` value.
    """
    tracker.execute_remotely(queue_name="default")
    # Initialize all the random seeds
    gen = train_setup(args.seed, args.experiment_name)
    os.environ['EXPERIMENT_NAME'] = args.experiment_name
    logger = get_logger(args.experiment_name)

    # Set device to CUDA only if applicable
    device = torch.device("cpu")
    if torch.cuda.is_available():
        device = torch.device("cuda")

    # Construct the model and its loss
    if not args.classification_model:
        slrt_model = SPOTER_EMBEDDINGS(
            features=args.vector_length,
            hidden_dim=args.hidden_dim,
            norm_emb=args.normalize_embeddings,
            dropout=args.dropout
        )
        model_type = 'embed'
        if args.hard_triplet_mining == "None":
            cel_criterion = nn.TripletMarginLoss(margin=args.triplet_loss_margin, p=2)
        elif args.hard_triplet_mining == "in_batch":
            cel_criterion = BatchAllTripletLoss(
                device=device,
                margin=args.triplet_loss_margin,
                filter_easy_triplets=bool(args.filter_easy_triplets)
            )
        else:
            # Fix: previously fell through silently and crashed much later
            # with an unbound `cel_criterion` NameError.
            raise ValueError(f"Unknown hard_triplet_mining value: {args.hard_triplet_mining}")
    else:
        slrt_model = SPOTER(num_classes=args.num_classes, hidden_dim=args.hidden_dim)
        model_type = 'classif'
        cel_criterion = nn.CrossEntropyLoss()
    slrt_model.to(device)

    if args.optimizer == "SGD":
        optimizer = optim.SGD(slrt_model.parameters(), lr=args.lr)
    elif args.optimizer == "ADAM":
        optimizer = optim.Adam(slrt_model.parameters(), lr=args.lr)
    else:
        # Fix: unknown optimizer previously crashed later with NameError.
        raise ValueError(f"Unknown optimizer: {args.optimizer}")

    if args.scheduler_factor > 0:
        # Classification minimizes loss; embeddings maximize silhouette score.
        mode = 'min' if args.classification_model else 'max'
        scheduler = optim.lr_scheduler.ReduceLROnPlateau(
            optimizer,
            mode=mode,
            factor=args.scheduler_factor,
            patience=args.scheduler_patience
        )
    else:
        scheduler = None

    if args.hard_mining_scheduler_triplets_threshold > 0:
        batching_scheduler = BatchingScheduler(triplets_threshold=args.hard_mining_scheduler_triplets_threshold)
    else:
        batching_scheduler = None

    # Ensure that the path for checkpointing and for images both exist
    Path("out-checkpoints/" + args.experiment_name + "/").mkdir(parents=True, exist_ok=True)
    Path("out-img/").mkdir(parents=True, exist_ok=True)

    # Training set
    transform = transforms.Compose([GaussianNoise(args.gaussian_mean, args.gaussian_std)])
    dataset_loader = get_dataset_loader(args.dataset_loader)
    dataset_folder = dataset_loader.get_dataset_folder(args.dataset_project, args.dataset_name)
    training_set_path = op.join(dataset_folder, args.training_set_path)

    with open(op.join(dataset_folder, 'id_to_label.json')) as fid:
        id_to_label = json.load(fid)
    id_to_label = {int(key): value for key, value in id_to_label.items()}

    if not args.classification_model:
        batch_size = args.batch_size
        val_batch_size = args.batch_size
        if args.hard_triplet_mining == "None":
            train_set = SLREmbeddingDataset(training_set_path, triplet=True, transform=transform, augmentations=True,
                                            augmentations_prob=args.augmentations_prob)
            collate_fn_train = collate_fn_triplet_padd
        elif args.hard_triplet_mining == "in_batch":
            train_set = SLREmbeddingDataset(training_set_path, triplet=False, transform=transform, augmentations=True,
                                            augmentations_prob=args.augmentations_prob)
            collate_fn_train = collate_fn_padd
            if is_pre_batch_sorting_enabled(args):
                # Load an oversized pre-batch that is later re-sorted into
                # hard mini-batches.
                batch_size *= args.hard_mining_pre_batch_multipler
        train_val_set = SLREmbeddingDataset(training_set_path, triplet=False)
        # Train dataloader for validation (no augmentations, no shuffling)
        train_val_loader = build_data_loader(train_val_set, val_batch_size, False, collate_fn_padd, gen)
    else:
        train_set = CzechSLRDataset(training_set_path, transform=transform, augmentations=True)
        batch_size = 1
        val_batch_size = 1
        collate_fn_train = None

    train_loader = build_data_loader(train_set, batch_size, True, collate_fn_train, gen)

    # Validation set
    validation_set_path = op.join(dataset_folder, args.validation_set_path)

    if args.classification_model:
        val_set = CzechSLRDataset(validation_set_path)
        collate_fn_val = None
    else:
        val_set = SLREmbeddingDataset(validation_set_path, triplet=False)
        collate_fn_val = collate_fn_padd

    val_loader = build_data_loader(val_set, val_batch_size, False, collate_fn_val, gen)

    # MARK: TRAINING
    train_acc, val_acc = 0, 0
    losses, train_accs, val_accs = [], [], []
    lr_progress = []
    top_val_acc = -999
    top_model_saved = True

    logger.info("Starting " + args.experiment_name + "...\n\n")

    if is_pre_batch_sorting_enabled(args):
        mini_batch_size = int(batch_size / args.hard_mining_pre_batch_multipler)
    else:
        mini_batch_size = None
    enable_batch_sorting = False
    pre_batch_mining_count = 1
    for epoch in range(1, args.epochs + 1):
        start_time = datetime.now()
        if not args.classification_model:
            train_kwargs = {"model": slrt_model,
                            "epoch_iters": args.epoch_iters,
                            "train_loader": train_loader,
                            "val_loader": val_loader,
                            "criterion": cel_criterion,
                            "optimizer": optimizer,
                            "device": device,
                            # LR scheduling only starts after the warmup epochs.
                            "scheduler": scheduler if epoch >= args.scheduler_warmup else None,
                            }
            if args.hard_triplet_mining == "None":
                train_loss, val_silhouette_coef = train_epoch_embedding(**train_kwargs)
            elif args.hard_triplet_mining == "in_batch":
                if epoch == args.start_mining_hard:
                    enable_batch_sorting = True
                    pre_batch_mining_count = args.hard_mining_pre_batch_mining_count
                train_kwargs.update(dict(enable_batch_sorting=enable_batch_sorting,
                                         mini_batch_size=mini_batch_size,
                                         pre_batch_mining_count=pre_batch_mining_count,
                                         batching_scheduler=batching_scheduler if enable_batch_sorting else None))

                train_loss, val_silhouette_coef, triplets_stats = train_epoch_embedding_online(**train_kwargs)

                tracker.log_scalar_metric("triplets", "valid_triplets", epoch, triplets_stats["valid_triplets"])
                tracker.log_scalar_metric("triplets", "used_triplets", epoch, triplets_stats["used_triplets"])
                tracker.log_scalar_metric("triplets_pct", "pct_used", epoch, triplets_stats["pct_used"])
            tracker.log_scalar_metric("train_loss", "loss", epoch, train_loss)
            losses.append(train_loss)

            # calculate acc on train dataset
            silhouette_coefficient_train = evaluate_embedding(slrt_model, train_val_loader, device)

            tracker.log_scalar_metric("silhouette_coefficient", "train", epoch, silhouette_coefficient_train)
            train_accs.append(silhouette_coefficient_train)

            val_accs.append(val_silhouette_coef)
            tracker.log_scalar_metric("silhouette_coefficient", "val", epoch, val_silhouette_coef)

        else:
            train_loss, _, _, train_acc = train_epoch(slrt_model, train_loader, cel_criterion, optimizer, device)
            tracker.log_scalar_metric("train_loss", "loss", epoch, train_loss)
            tracker.log_scalar_metric("acc", "train", epoch, train_acc)
            losses.append(train_loss)
            train_accs.append(train_acc)

            _, _, val_acc = evaluate(slrt_model, val_loader, device)
            val_accs.append(val_acc)
            tracker.log_scalar_metric("acc", "val", epoch, val_acc)

        logger.info(f"Epoch time: {datetime.now() - start_time}")
        logger.info("[" + str(epoch) + "] TRAIN loss: " + str(train_loss) + " acc: " + str(train_accs[-1]))
        logger.info("[" + str(epoch) + "] VALIDATION acc: " + str(val_accs[-1]))

        lr_progress.append(optimizer.param_groups[0]["lr"])
        tracker.log_scalar_metric("lr", "lr", epoch, lr_progress[-1])

        # Keep a deep copy of the best-so-far model in memory; it is persisted
        # lazily (below) to avoid writing a checkpoint every epoch.
        if val_accs[-1] > top_val_acc:
            top_val_acc = val_accs[-1]
            top_model_name = "checkpoint_" + model_type + "_" + str(epoch) + ".pth"
            top_model_dict = {
                "name": top_model_name,
                "epoch": epoch,
                "val_acc": val_accs[-1],
                "config_args": args,
                "state_dict": copy.deepcopy(slrt_model.state_dict()),
            }
            top_model_saved = False

        # Save checkpoint if it is the best on validation and delete previous checkpoints
        if args.save_checkpoints_every > 0 and epoch % args.save_checkpoints_every == 0 and not top_model_saved:
            torch.save(
                top_model_dict,
                "out-checkpoints/" + args.experiment_name + "/" + top_model_name
            )
            top_model_saved = True
            logger.info("Saved new best checkpoint: " + top_model_name)

    # save top model if checkpoints are disabled
    if not top_model_saved:
        torch.save(
            top_model_dict,
            "out-checkpoints/" + args.experiment_name + "/" + top_model_name
        )
        logger.info("Saved new best checkpoint: " + top_model_name)

    # Log scatter plots
    if not args.classification_model and args.hard_triplet_mining == "in_batch":
        logger.info("Generating Scatter Plot.")
        best_model = slrt_model
        best_model.load_state_dict(top_model_dict["state_dict"])
        create_embedding_scatter_plots(tracker, best_model, train_loader, val_loader, device, id_to_label, epoch,
                                       top_model_name)
    logger.info("The experiment is finished.")
class BatchGrouper:
    """
    Will cluster all `total_items` into `max_groups` clusters based on distances in
    `sorted_dists`. Each group has `mini_batch_size` elements and these elements are just integers in
    range 0...total_items.

    Distances between these items are expected to be scaled to 0...1 in a way that distances for two items in the
    same class are higher if closer to 1, while distances between elements of different classes are higher if closer
    to 0.

    The logic is picking the highest value distance and assigning both items to the same cluster/group if possible.
    This might include merging 2 clusters.
    There are a few thresholds to limit the computational cost. If the scaled distance between a pair is below
    `dist_threshold`, or more than `assign_threshold` percent of items have been assigned to the groups, we stop and
    assign the remaining items to the groups that have space left.
    """
    # Fix: removed the class-level counter/threshold attributes that duplicated
    # (and were always shadowed by) the instance attributes set in __init__.
    _logger = logging.getLogger("BatchGrouper")

    def __init__(self, sorted_dists, total_items, mini_batch_size=32, dist_threshold=0.5, assign_threshold=0.8) -> None:
        self.sorted_dists = sorted_dists
        self.total_items = total_items
        self.mini_batch_size = mini_batch_size
        self.max_groups = int(total_items / mini_batch_size)
        self.groups = {}          # group id -> list of assigned items
        self.item_to_group = {}   # item -> group id
        self.items_assigned = 0
        self.next_group = 0
        self.dist_threshold = dist_threshold
        self.assign_threshold = assign_threshold

    def cluster_items(self):
        """Main entry point: greedily cluster items by descending scaled distance.

        :return list: item indices reordered so each consecutive run of
            `mini_batch_size` entries forms one mined mini-batch.
        """
        for i in range(self.sorted_dists.shape[-1]):
            a, b, dist = self.sorted_dists[:, i]
            a, b = int(a), int(b)
            # Stop early once pairs are no longer informative or most items
            # have been placed; the rest is filled in assign_remaining_items.
            if dist < self.dist_threshold or self.items_assigned > self.total_items * self.assign_threshold:
                self._logger.info(f"Breaking with dist: {dist}, and {self.items_assigned} items assigned")
                break
            if a not in self.item_to_group and b not in self.item_to_group:
                g = self.create_or_get_group()
                self.assign_group(a, g)
                self.assign_group(b, g)
            elif a not in self.item_to_group:
                if not self.group_is_full(self.item_to_group[b]):
                    self.assign_group(a, self.item_to_group[b])
            elif b not in self.item_to_group:
                if not self.group_is_full(self.item_to_group[a]):
                    self.assign_group(b, self.item_to_group[a])
            else:
                grp_a = self.item_to_group[a]
                grp_b = self.item_to_group[b]
                self.merge_groups(grp_a, grp_b)
        self.assign_remaining_items()
        return list(np.concatenate(list(self.groups.values())).flat)

    def assign_group(self, item, group):
        """Assigns `item` to group `group`"""
        self.item_to_group[item] = group
        self.groups[group].append(item)
        self.items_assigned += 1

    def create_or_get_group(self):
        """Creates a new group if current group count is less than max_groups.
        Otherwise returns first group with at least two free slots.

        :raises RuntimeError: if every existing group is (almost) full.
        :return int: The group id
        """
        if self.next_group < self.max_groups:
            group = self.next_group
            self.groups[group] = []
            self.next_group += 1
            return group
        for i in range(self.next_group):
            if len(self.groups[i]) <= self.mini_batch_size - 2:
                return i
        # Fix: previously `group` was left unbound here and the caller crashed
        # with an opaque UnboundLocalError.
        raise RuntimeError("No group with free capacity available")

    def group_is_full(self, group):
        return len(self.groups[group]) == self.mini_batch_size

    def can_merge_groups(self, grp_a, grp_b):
        return grp_a != grp_b and (len(self.groups[grp_a]) + len(self.groups[grp_b]) < self.mini_batch_size)

    def merge_groups(self, grp_a, grp_b):
        """Will merge two groups together, if possible. Otherwise does nothing."""
        # Normalize order so we always merge into the lower group id.
        if grp_a > grp_b:
            grp_a, grp_b = grp_b, grp_a
        if self.can_merge_groups(grp_a, grp_b):
            self._logger.debug(f"MERGE {grp_a} with {grp_b}: {len(self.groups[grp_a])} {len(self.groups[grp_b])}")
            for b in self.groups[grp_b]:
                self.item_to_group[b] = grp_a
            self.groups[grp_a].extend(self.groups[grp_b])
            self.groups[grp_b] = []
            self.replace_group(grp_b)

    def replace_group(self, group):
        """Replace a group with the last one in the list

        :param int group: Group to replace
        """
        grp_to_change = self.next_group - 1
        if grp_to_change != group:
            for item in self.groups[grp_to_change]:
                self.item_to_group[item] = group
            self.groups[group] = self.groups[grp_to_change]
        del self.groups[grp_to_change]
        self.next_group -= 1

    def assign_remaining_items(self):
        """Assign remaining items into groups"""
        grp_pointer = 0
        i = 0
        self._logger.info(f"Assigning rest of items: {self.items_assigned} of {self.total_items}")
        while i < self.total_items:
            if i not in self.item_to_group:
                if grp_pointer not in self.groups:
                    # This would happen if a group is still empty at this stage
                    assert grp_pointer < self.max_groups
                    new_group = self.create_or_get_group()
                    assert new_group == grp_pointer
                if len(self.groups[grp_pointer]) < self.mini_batch_size:
                    self.assign_group(i, grp_pointer)
                    i += 1
                else:
                    grp_pointer += 1
            else:
                i += 1
def get_dist_tuple_list(dist_matrix):
    """Flatten the strict lower triangle of `dist_matrix` into a 3 x P tensor
    of (row_index, col_index, distance) columns, P = n*(n-1)/2 pairs.
    """
    batch_size = dist_matrix.size()[0]
    indices = torch.tril_indices(batch_size, batch_size, offset=-1)
    values = dist_matrix[indices[0], indices[1]].cpu()
    return torch.cat([indices, values.unsqueeze(0)], dim=0)


def get_scaled_distances(embeddings, labels, device, same_label_factor=1):
    """Returns distance matrix between all embeddings scaled to the 0-1 range where 0 is good and 1 is bad.
    This means that small distances for embeddings of the same class will be close to 0 while small distances for
    embeddings of different classes will be close to 1

    :param _type_ embeddings: Embeddings of batch items
    :param _type_ labels: Labels associated to the embeddings
    :param _type_ device: Device to run on (cuda or cpu)
    :param int same_label_factor: Multiplies the weight of same-class distances allowing to give more or less importance
    to these compared to distinct-class distances, defaults to 1 (which means equal weight)
    :return torch.Tensor: Scaled distance matrix
    """
    # Get pairwise distance matrix
    distance_matrix = torch.cdist(embeddings, embeddings, p=2)
    # Get list of tuples with emb_A, emb_B, dist ordered by greater for same label and smaller for diff label
    # shape: (batch_size, batch_size)
    labels = labels.to(device)
    labels_equal = (labels.unsqueeze(0) == labels.unsqueeze(1)).squeeze()
    labels_distinct = torch.logical_not(labels_equal)
    pos_dist = distance_matrix * labels_equal
    neg_dist = distance_matrix * labels_distinct

    # Use some scaling to bring both to a range of 0-1.
    # Fix: clamp the denominators so an all-same-class batch or identical
    # embeddings (max distance 0) no longer produces NaNs via 0/0.
    pos_max = torch.clamp(pos_dist.max(), min=1e-12)
    neg_max = torch.clamp(neg_dist.max(), min=1e-12)
    # Closer to 1 is harder
    pos_dist = pos_dist / pos_max * same_label_factor
    neg_dist = 1 * labels_distinct - (neg_dist / neg_max)
    return pos_dist + neg_dist
def sort_batches(inputs, labels, masks, embeddings, device, mini_batch_size=32,
                 scheduler: Optional[BatchingScheduler] = None):
    """Reorder an oversized pre-batch so that consecutive runs of
    `mini_batch_size` items form hard mini-batches for triplet mining.

    Returns the reordered (inputs, labels, masks) tensors.
    """
    t_start = datetime.now()

    factor = scheduler.get_scaling_same_label_factor() if scheduler else 1
    scaled = get_scaled_distances(embeddings, labels, device, factor)
    # Vector of (row, column, dist) columns for every pair.
    pair_dists = get_dist_tuple_list(scaled).cpu().detach().numpy()
    # Rank pairs by descending scaled distance (hardest first).
    order = pair_dists[-1, :].argsort()[::-1]
    ranked = pair_dists[:, order]

    # Greedily assign both items of each hard pair to the same group.
    threshold = scheduler.get_dist_threshold() if scheduler else 0.5
    grouper = BatchGrouper(ranked, total_items=labels.size()[0], mini_batch_size=mini_batch_size,
                           dist_threshold=threshold)
    index_tensor = torch.tensor(grouper.cluster_items()).type(torch.IntTensor)

    sorted_inputs = torch.index_select(inputs, dim=0, index=index_tensor)
    sorted_labels = torch.index_select(labels, dim=0, index=index_tensor)
    sorted_masks = torch.index_select(masks, dim=0, index=index_tensor)

    logger.info(f"Batch sorting took: {datetime.now() - t_start}")
    return sorted_inputs, sorted_labels, sorted_masks
:class:`dict`. + """ + return {key: value for key, value in self.__dict__.items()} + + def load_state_dict(self, state_dict): + """Loads the schedulers state. + + Args: + state_dict (dict): scheduler state. Should be an object returned + from a call to :meth:`state_dict`. + """ + self.__dict__.update(state_dict) + + def step(self, used_triplets): + self._step_count += 1 + self._last_used_triplets.append(used_triplets) + if (np.mean(self._last_used_triplets) < self.triplets_threshold and + self._last_update_step + self.cooldown <= self._step_count): + if self._dist_threshold > self.min_threshold: + print(f"Updating dist_threshold at {self._step_count} ({np.mean(self._last_used_triplets)})") + self.update_dist_threshold() + if self._scaling_same_label_factor > 0.6: + print(f"Updating scale factor at {self._step_count} ({np.mean(self._last_used_triplets)})") + self.update_scale_factor() + self._last_update_step = self._step_count + + def update_scale_factor(self): + self._scaling_same_label_factor = max(self._scaling_same_label_factor * 0.9, 0.6) + print(f"Updating scaling factor to {self._scaling_same_label_factor}") + + def update_dist_threshold(self): + self._dist_threshold = max(self.min_threshold, self._dist_threshold * self.decay_factor) + print(f"Updated dist_threshold to {self._dist_threshold}") + + def get_dist_threshold(self) -> float: + return self._dist_threshold + + def get_scaling_same_label_factor(self) -> float: + return self._scaling_same_label_factor diff --git a/training/gaussian_noise.py b/training/gaussian_noise.py new file mode 100644 index 0000000..7ca8889 --- /dev/null +++ b/training/gaussian_noise.py @@ -0,0 +1,18 @@ + +import torch + + +class GaussianNoise(object): + def __init__(self, mean=0., std=1.): + self.std = std + self.mean = mean + + def __call__(self, tensor): + return tensor + torch.randn(tensor.size()) * self.std + self.mean + + def __repr__(self): + return self.__class__.__name__ + '(mean={0}, std={1})'.format(self.mean, 
self.std) + + +if __name__ == "__main__": + pass diff --git a/training/online_batch_mining.py b/training/online_batch_mining.py new file mode 100644 index 0000000..5d0cf7a --- /dev/null +++ b/training/online_batch_mining.py @@ -0,0 +1,105 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + + +eps = 1e-8 # an arbitrary small value to be used for numerical stability tricks + +# Adapted from https://qdrant.tech/articles/triplet-loss/ + + +class BatchAllTripletLoss(nn.Module): + """Uses all valid triplets to compute Triplet loss + Args: + margin: Margin value in the Triplet Loss equation + """ + + def __init__(self, device, margin=1., filter_easy_triplets=True): + super().__init__() + self.margin = margin + self.device = device + self.filter_easy_triplets = filter_easy_triplets + + def get_triplet_mask(self, labels): + """compute a mask for valid triplets + Args: + labels: Batch of integer labels. shape: (batch_size,) + Returns: + Mask tensor to indicate which triplets are actually valid. Shape: (batch_size, batch_size, batch_size) + A triplet is valid if: + `labels[i] == labels[j] and labels[i] != labels[k]` + and `i`, `j`, `k` are different. 
+ """ + # step 1 - get a mask for distinct indices + + # shape: (batch_size, batch_size) + indices_equal = torch.eye(labels.size()[0], dtype=torch.bool, device=labels.device) + indices_not_equal = torch.logical_not(indices_equal) + # shape: (batch_size, batch_size, 1) + i_not_equal_j = indices_not_equal.unsqueeze(2) + # shape: (batch_size, 1, batch_size) + i_not_equal_k = indices_not_equal.unsqueeze(1) + # shape: (1, batch_size, batch_size) + j_not_equal_k = indices_not_equal.unsqueeze(0) + # Shape: (batch_size, batch_size, batch_size) + distinct_indices = torch.logical_and(torch.logical_and(i_not_equal_j, i_not_equal_k), j_not_equal_k) + + # step 2 - get a mask for valid anchor-positive-negative triplets + + # shape: (batch_size, batch_size) + labels_equal = labels.unsqueeze(0) == labels.unsqueeze(1) + # shape: (batch_size, batch_size, 1) + i_equal_j = labels_equal.unsqueeze(2) + # shape: (batch_size, 1, batch_size) + i_equal_k = labels_equal.unsqueeze(1) + # shape: (batch_size, batch_size, batch_size) + valid_indices = torch.logical_and(i_equal_j, torch.logical_not(i_equal_k)) + + # step 3 - combine two masks + mask = torch.logical_and(distinct_indices, valid_indices) + + return mask + + def forward(self, embeddings, labels, filter_easy_triplets=True): + """computes loss value. + Args: + embeddings: Batch of embeddings, e.g., output of the encoder. shape: (batch_size, embedding_dim) + labels: Batch of integer labels associated with embeddings. shape: (batch_size,) + Returns: + Scalar loss value. 
+ """ + # step 1 - get distance matrix + # shape: (batch_size, batch_size) + distance_matrix = torch.cdist(embeddings, embeddings, p=2) + + # step 2 - compute loss values for all triplets by applying broadcasting to distance matrix + + # shape: (batch_size, batch_size, 1) + anchor_positive_dists = distance_matrix.unsqueeze(2) + # shape: (batch_size, 1, batch_size) + anchor_negative_dists = distance_matrix.unsqueeze(1) + # get loss values for all possible n^3 triplets + # shape: (batch_size, batch_size, batch_size) + triplet_loss = anchor_positive_dists - anchor_negative_dists + self.margin + + # step 3 - filter out invalid or easy triplets by setting their loss values to 0 + + # shape: (batch_size, batch_size, batch_size) + mask = self.get_triplet_mask(labels) + valid_triplets = mask.sum() + triplet_loss *= mask.to(self.device) + # easy triplets have negative loss values + triplet_loss = F.relu(triplet_loss) + + if self.filter_easy_triplets: + # step 4 - compute scalar loss value by averaging positive losses + num_positive_losses = (triplet_loss > eps).float().sum() + # We want to factor in how many triplets were used compared to batch_size (used_triplets * 3 / batch_size) + # The effect of this should be similar to LR decay but penalizing batches with fewer hard triplets + percent_used_factor = min(1.0, num_positive_losses * 3 / labels.size()[0]) + + triplet_loss = triplet_loss.sum() / (num_positive_losses + eps) * percent_used_factor + return triplet_loss, valid_triplets, int(num_positive_losses) + else: + triplet_loss = triplet_loss.sum() / (valid_triplets + eps) + return triplet_loss, valid_triplets, valid_triplets diff --git a/training/train_arguments.py b/training/train_arguments.py new file mode 100644 index 0000000..5980aff --- /dev/null +++ b/training/train_arguments.py @@ -0,0 +1,84 @@ +import argparse + + +def get_default_args(): + parser = argparse.ArgumentParser(add_help=False) + + parser.add_argument("--experiment_name", type=str, 
default="lsa_64_spoter", + help="Name of the experiment after which the logs and plots will be named") + parser.add_argument("--num_classes", type=int, default=100, help="Number of classes to be recognized by the model") + parser.add_argument("--hidden_dim", type=int, default=108, + help="Hidden dimension of the underlying Transformer model") + parser.add_argument("--seed", type=int, default=379, + help="Seed with which to initialize all the random components of the training") + + # Embeddings + parser.add_argument("--classification_model", action='store_true', default=False, + help="Select SPOTER model to train, pass only for original classification model") + parser.add_argument("--vector_length", type=int, default=32, + help="Number of features used in the embedding vector") + parser.add_argument("--epoch_iters", type=int, default=-1, + help="Iterations per epoch while training embeddings. Will loop through dataset once if -1") + parser.add_argument("--batch_size", type=int, default=32, help="Batch Size during training and validation") + parser.add_argument("--hard_triplet_mining", type=str, default=None, + help="Strategy to select hard triplets, options [None, in_batch]") + parser.add_argument("--triplet_loss_margin", type=float, default=1, + help="Margin used in triplet loss margin (See documentation)") + parser.add_argument("--normalize_embeddings", action='store_true', default=False, + help="Normalize model output to keep vector length to one") + parser.add_argument("--filter_easy_triplets", action='store_true', default=False, + help="Filter easy triplets in online in batch triplets") + + # Data + parser.add_argument("--dataset_name", type=str, default="", help="Dataset name") + parser.add_argument("--dataset_project", type=str, default="Sign Language Recognition", help="Dataset project name") + parser.add_argument("--training_set_path", type=str, default="", + help="Path to the training dataset CSV file (relative to root dataset)") + 
parser.add_argument("--validation_set_path", type=str, default="", help="Path to the validation dataset CSV file") + parser.add_argument("--dataset_loader", type=str, default="local", + help="Dataset loader to use, options: [clearml, local]") + + # Training hyperparameters + parser.add_argument("--epochs", type=int, default=1300, help="Number of epochs to train the model for") + parser.add_argument("--lr", type=float, default=0.001, help="Learning rate for the model training") + parser.add_argument("--dropout", type=float, default=0.1, + help="Dropout used in transformer layer") + parser.add_argument("--augmentations_prob", type=float, default=0.5, help="How often to use data augmentation") + + # Checkpointing + parser.add_argument("--save_checkpoints_every", type=int, default=-1, + help="Determines every how many epochs the weight checkpoints are saved. If -1 only best model \ + after final epoch") + + # Optimizer + parser.add_argument("--optimizer", type=str, default="SGD", + help="Optimizer used during training, options: [SGD, ADAM]") + + # Tracker + parser.add_argument("--tracker", type=str, default="none", + help="Experiment tracker to use, options: [clearml, none]") + + # Scheduler + parser.add_argument("--scheduler_factor", type=float, default=0, + help="Factor for the ReduceLROnPlateau scheduler") + parser.add_argument("--scheduler_patience", type=int, default=10, + help="Patience for the ReduceLROnPlateau scheduler") + parser.add_argument("--scheduler_warmup", type=int, default=400, + help="Warmup epochs before scheduler starts") + + # Gaussian noise normalization + parser.add_argument("--gaussian_mean", type=float, default=0, help="Mean parameter for Gaussian noise layer") + parser.add_argument("--gaussian_std", type=float, default=0.001, + help="Standard deviation parameter for Gaussian noise layer") + + # Batch Sorting + parser.add_argument("--start_mining_hard", type=int, default=None, help="On which epoch to start hard mining") + 
parser.add_argument("--hard_mining_pre_batch_multipler", type=int, default=16, + help="How many batches should be computed at once") + parser.add_argument("--hard_mining_pre_batch_mining_count", type=int, default=5, + help="How many times to loop through a list of computed batches") + parser.add_argument("--hard_mining_scheduler_triplets_threshold", type=float, default=0, + help="Enables batching grouping scheduler if > 0. Defines threshold for when to decay the \ + distance threshold of the batch sorter") + + return parser diff --git a/training/train_utils.py b/training/train_utils.py new file mode 100644 index 0000000..8069abd --- /dev/null +++ b/training/train_utils.py @@ -0,0 +1,71 @@ +import os +import random +import numpy as np +import pandas as pd +import plotly.express as px +import torch + +from models import embeddings_scatter_plot, embeddings_scatter_plot_splits + + +def train_setup(seed, experiment_name): + random.seed(seed) + np.random.seed(seed) + os.environ["PYTHONHASHSEED"] = str(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + g = torch.Generator() + g.manual_seed(seed) + return g + + +def create_embedding_scatter_plots(tracker, model, train_loader, val_loader, device, id_to_label, epoch, model_name): + tsne_results, labels = embeddings_scatter_plot(model, train_loader, device, id_to_label, perplexity=40, n_iter=1000) + + df = pd.DataFrame({'x': tsne_results[:, 0], + 'y': tsne_results[:, 1], + 'label': labels}) + fig = px.scatter(df, y="y", x="x", color="label") + + tracker.log_chart( + title="Training Scatter Plot with Best Model: " + model_name, + series="Scatter Plot", + iteration=epoch, + figure=fig + ) + + tsne_results, labels = embeddings_scatter_plot(model, val_loader, device, id_to_label, perplexity=40, n_iter=1000) + + df = pd.DataFrame({'x': tsne_results[:, 0], + 'y': tsne_results[:, 1], + 'label': labels}) + fig = px.scatter(df, y="y", x="x", 
color="label") + + tracker.log_chart( + title="Validation Scatter Plot with Best Model: " + model_name, + series="Scatter Plot", + iteration=epoch, + figure=fig, + ) + + dataloaders = {'train': train_loader, + 'val': val_loader} + splits = list(dataloaders.keys()) + tsne_results_splits, labels_splits = embeddings_scatter_plot_splits(model, dataloaders, + device, id_to_label, perplexity=40, n_iter=1000) + tsne_results = np.vstack([tsne_results_splits[split] for split in splits]) + labels = np.concatenate([labels_splits[split] for split in splits]) + split = np.concatenate([[split]*len(labels_splits[split]) for split in splits]) + df = pd.DataFrame({'x': tsne_results[:, 0], + 'y': tsne_results[:, 1], + 'label': labels, + 'split': split}) + fig = px.scatter(df, y="y", x="x", color="label", symbol='split') + tracker.log_chart( + title="Scatter Plot of train and val with Best Model: " + model_name, + series="Scatter Plot", + iteration=epoch, + figure=fig, + ) diff --git a/utils.py b/utils.py new file mode 100644 index 0000000..0949518 --- /dev/null +++ b/utils.py @@ -0,0 +1,40 @@ +import logging +import os + + +class CustomFormatter(logging.Formatter): + + grey = "\x1b[38;20m" + yellow = "\x1b[33;20m" + red = "\x1b[31;20m" + bold_red = "\x1b[31;1m" + reset = "\x1b[0m" + custom_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s (%(filename)s:%(lineno)d)" + + FORMATS = { + logging.DEBUG: grey + custom_format + reset, + logging.INFO: grey + custom_format + reset, + logging.WARNING: yellow + custom_format + reset, + logging.ERROR: red + custom_format + reset, + logging.CRITICAL: bold_red + custom_format + reset + } + + def format(self, record): + log_fmt = self.FORMATS.get(record.levelno) + formatter = logging.Formatter(log_fmt) + return formatter.format(record) + + +def get_logger(name): + logger = logging.getLogger(name) + logger.setLevel(logging.INFO) + # create console handler with a higher log level + ch = logging.StreamHandler() + 
ch.setLevel(logging.DEBUG) + ch.setFormatter(CustomFormatter()) + file_handler = logging.FileHandler(os.getenv('EXPERIMENT_NAME', 'run') + ".log") + file_handler.setLevel(logging.DEBUG) + file_handler.setFormatter(CustomFormatter()) + logger.addHandler(ch) + logger.addHandler(file_handler) + return logger diff --git a/web/README.md b/web/README.md new file mode 100644 index 0000000..0490fe0 --- /dev/null +++ b/web/README.md @@ -0,0 +1,8 @@ +# SPOTER Web + +To test Spoter model in the web, follow these steps: +* Convert your latest Pytorch model to Onnx by running `python convert.py`. This is best done inside the Docker container. You will need to install additional dependencies for the conversions (see commented lines in requirements.txt) +* The ONNX should be generated in the `web` folder, otherwise copy it there. +* run `npx light-server -s . -p 8080` in the `web` folder. (`npx` comes with `npm`) + +Enjoy! diff --git a/web/index.html b/web/index.html new file mode 100644 index 0000000..4bd6a86 --- /dev/null +++ b/web/index.html @@ -0,0 +1,61 @@ + + +
+ ONNX Runtime JavaScript examples: Quick Start - Web (using script tag) +
+ + +

+ + + + + \ No newline at end of file