diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..95cf77a --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +.idea/ +__pycache__/ +checkpoints/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..6abf4f1 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "viper/GLIP"] + path = viper/GLIP + url = https://github.com/sachit-menon/GLIP.git diff --git a/README.md b/README.md new file mode 100644 index 0000000..1ece941 --- /dev/null +++ b/README.md @@ -0,0 +1,138 @@ +# VDebugger + +This repo is for **VDebugger: Harnessing Execution Feedback for Debugging Visual Programs** + +[Paper](), [Website](https://shirley-wu.github.io/vdebugger/index.html) + +The training data and model are uploaded to huggingface: https://huggingface.co/VDebugger + +## Outlines + +- [Environment Setup](https://github.com/shirley-wu/vdebugger/tree/main?tab=readme-ov-file#environment-setup) +- [Dataset Setup](https://github.com/shirley-wu/vdebugger/tree/main?tab=readme-ov-file#dataset-setup) +- [Generation and Execution of Visual Programs](https://github.com/shirley-wu/vdebugger/tree/main?tab=readme-ov-file#generation-and-execution-of-visual-programs) +- [Inference of VDebugger](https://github.com/shirley-wu/vdebugger/tree/main?tab=readme-ov-file#inference-of-vdebugger) + +## Environment Setup + +This code is partially adapted from [ViperGPT](https://github.com/cvlab-columbia/viper). We sincerely thank the authors for their great work! + +To setup the environment, you should: +1. Clone recursively: +```bash +git clone --recurse-submodules https://github.com/cvlab-columbia/viper.git +``` +2. Install pytorch based on your own environment. We installed `torch==2.1.2` with cuda 12.1 +3. Install dependencies: +```bash +pip install -r requirements.txt +``` +4. Setup ViperGPT environments by: +```bash +cd viper +bash download_models.sh +export PATH=/usr/local/cuda/bin:$PATH +cd GLIP +python setup.py clean --all build develop --user +``` +5. 
If you need to use openai APIs: write api key into `viper/qpi.key` + +## Dataset Setup + +Please follow the guidelines below to download each dataset: +1. GQA: https://cs.stanford.edu/people/dorarad/gqa/download.html. The file structure should look as follows: +``` +gqa/ +├── questions +│ ├── readme.txt +│ ├── {val, test, testdev, challenge}_{all, balanced}_questions.json +│ ├── submission_all_questions.json +│ ├── train_balanced_questions.json +│ ├── train_all_questions/ +└── images + └── *.jpg +``` +2. TallyQA: https://github.com/manoja328/TallyQA_dataset. The file structure should look as follows: +``` +tallyqa/ +├── {test, train}.json +└── {train2014, val2014, VG_100K, VG_100K_2}/ + └── *.jpg +``` +3. NLVRv2: https://github.com/lil-lab/nlvr/tree/master/nlvr2. The file structure should look as follows: +``` +nlvr2/ +├── balanced_{dev, test1, test2, train}.jsonl +└── {dev, test1, test2, train}/ + └── *.png +``` +4. RefCOCO*: https://github.com/lichengunc/refer. The file structure should look as follows: +``` +refer/ +├── refcoco: +│ ├── instances.json +│ ├── refs(google).p +│ └── refs(unc).p +├── refcoco+: +│ ├── instances.json +│ └── refs(unc).p +├── refcocog +│ ├── instances.json +│ ├── refs(google).p +│ └── refs(umd).p +└── {train2014, train2017, val2014, val2017}/ + └── *.jpg +``` +5. COVR: https://covr-dataset.github.io/. The file structure should look as follows: +``` +covr/ +├── {train, val, test}.jsonl +├── gqa_images +│ └── *.jpg +└── imSitu_images + └── {adjusting, ...}/ + └── *.jpg +``` +6. RSVG: https://github.com/ZhanYang-nwpu/RSVG-pytorch. The file structure should look as follows: +``` +rsvg/ +├── {train, val, test.txt} +├── Annotations/ +│ └── *.xml +└── JPEGImages/ + └── *.jpg +``` + +## Generation and Execution of Visual Programs + +Go to `viper/` for this step. We recommend first generating and then executing the visual programs in two separate steps. Take GQA dataset as an example: +1. 
Generate programs: +```bash +CONFIG_NAMES=generate/gqa python main_batch_generate.py +``` +This script will load the configuration under `config/generate/gqa.yaml`. Please remember to change YOUR_DATA_DIR into your data directory. The generated code will be saved in a csv under `code` field +2. Execute and evaluate programs: +```bash +CONFIG_NAMES=execute/gqa python main_batch_execute.py +``` +This script will load the configuration under `config/execute/gqa.yaml`. Please also remember to update YOUR_DATA_DIR, and change the `cached_codex_path:` field into the csv produced in step 1. The accuracy / IoU will be computed. +3. If you want to obtain execution feedback: +```bash +CONFIG_NAMES=execute/gqa python main_batch_trace.py A_RANDOM_STAMP +``` +You can use the same configuration as in step 2. If you want to run multiple `main_batch_trace.py` in the same time, please use different `A_RANDOM_STAMP` for different processes. The execution feedback will be saved in a csv under `traced` field. + +## Inference of VDebugger + +For inference with VDebugger, it is required to first generate and execute visual programs, and obtain a csv file containing `traced` field. 
Take GQA dataset and VDebugger/VDebugger-{critic, refiner}-generalist-13B as an example: +```bash +# Step 1: infer critic +python infer_critic.py VDebugger/VDebugger-critic-generalist-13B --input YOUR_CSV_CONTAINING_TRACED_FIELD --dataset gqa # output file will be written to critic-infer.csv +# Step 2: infer refiner +python infer_refine.py critic-infer.csv VDebugger/VDebugger-refiner-generalist-13B # output file will be written to critic-refine-infer.csv +``` +Then you can execute the programs in `critic-refine-infer.csv` as in step 2 of [Generation and Execution of Visual Programs](https://github.com/shirley-wu/vdebugger/tree/main?tab=readme-ov-file#generation-and-execution-of-visual-programs) + +## Training of VDebugger + +If you want to reproduce our training of VDebugger, please use `vdebugger/training_scripts/train_{critic, refiner}.sh`. You will need to install `deepspeed==0.14.0`. diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 0000000..621b792 --- /dev/null +++ b/docs/index.html @@ -0,0 +1,242 @@ + + + + + + + + VDebugger + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+

+ Logo + VDebugger +

+

+ Harnessing Execution Feedback for Debugging Visual Programs +

+ + +
+ University of California Los Angeles +
+ + +
+
+
+
+
+ +
+
+
+ Overview of VDebugger. +
+ +
+ +
+ +
+
+
+
+ + +
+

                        Visual programs are executable code generated by large language models to address visual reasoning problems. They decompose complex questions into multiple reasoning steps and invoke specialized models for each step to solve the problems.

+

                        However, these programs are prone to logic errors, with our preliminary evaluation showing that 58% of the total errors are caused by program logic errors. Debugging complex visual programs remains a major bottleneck for visual reasoning.

+

                        To address this, we introduce VDebugger, a novel critic-refiner framework trained to localize and debug visual programs by tracking execution step by step. VDebugger identifies and corrects program errors leveraging detailed execution feedback, improving interpretability and accuracy. The training data is generated through an automated pipeline that injects errors into correct visual programs using a novel mask-best decoding technique.

+

                        Evaluations on six datasets demonstrate VDebugger's effectiveness, showing performance improvements of up to 3.2% in downstream task accuracy. Further studies show VDebugger's ability to generalize to unseen tasks, bringing a notable improvement of 2.3% on the unseen COVR task.

+

+
+
+
+
+
+ +
+
+
+ Comparison against existing work. +

Comparison against existing work.

+
+ +
+ +
+ +
+
+
+
+

Results

+ +
+
+
+
+ +
+
+
+
+

Qualitative Analysis

+ +
+
+
+
+ +
+
+

BibTeX

+

+      TODO
+    
+
+
+ + + + + diff --git a/docs/static/css/bulma-carousel.min.css b/docs/static/css/bulma-carousel.min.css new file mode 100644 index 0000000..4d4b7d1 --- /dev/null +++ b/docs/static/css/bulma-carousel.min.css @@ -0,0 +1 @@ +@-webkit-keyframes spinAround{from{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes spinAround{from{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.slider{position:relative;width:100%}.slider-container{display:flex;flex-wrap:nowrap;flex-direction:row;overflow:hidden;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0);min-height:100%}.slider-container.is-vertical{flex-direction:column}.slider-container .slider-item{flex:none}.slider-container .slider-item .image.is-covered img{-o-object-fit:cover;object-fit:cover;-o-object-position:center center;object-position:center center;height:100%;width:100%}.slider-container .slider-item .video-container{height:0;padding-bottom:0;padding-top:56.25%;margin:0;position:relative}.slider-container .slider-item .video-container.is-1by1,.slider-container .slider-item .video-container.is-square{padding-top:100%}.slider-container .slider-item .video-container.is-4by3{padding-top:75%}.slider-container .slider-item .video-container.is-21by9{padding-top:42.857143%}.slider-container .slider-item .video-container embed,.slider-container .slider-item .video-container iframe,.slider-container .slider-item .video-container object{position:absolute;top:0;left:0;width:100%!important;height:100%!important}.slider-navigation-next,.slider-navigation-previous{display:flex;justify-content:center;align-items:center;position:absolute;width:42px;height:42px;background:#fff center center no-repeat;background-size:20px 20px;border:1px solid #fff;border-radius:25091983px;box-shadow:0 2px 5px #3232321a;top:50%;margin-top:-20px;left:0;cursor:pointer;transition:opacity .3s,-webkit-transform 
.3s;transition:transform .3s,opacity .3s;transition:transform .3s,opacity .3s,-webkit-transform .3s}.slider-navigation-next:hover,.slider-navigation-previous:hover{-webkit-transform:scale(1.2);transform:scale(1.2)}.slider-navigation-next.is-hidden,.slider-navigation-previous.is-hidden{display:none;opacity:0}.slider-navigation-next svg,.slider-navigation-previous svg{width:25%}.slider-navigation-next{left:auto;right:0;background:#fff center center no-repeat;background-size:20px 20px}.slider-pagination{display:none;justify-content:center;align-items:center;position:absolute;bottom:0;left:0;right:0;padding:.5rem 1rem;text-align:center}.slider-pagination .slider-page{background:#fff;width:10px;height:10px;border-radius:25091983px;display:inline-block;margin:0 3px;box-shadow:0 2px 5px #3232321a;transition:-webkit-transform .3s;transition:transform .3s;transition:transform .3s,-webkit-transform .3s;cursor:pointer}.slider-pagination .slider-page.is-active,.slider-pagination .slider-page:hover{-webkit-transform:scale(1.4);transform:scale(1.4)}@media screen and (min-width:800px){.slider-pagination{display:flex}}.hero.has-carousel{position:relative}.hero.has-carousel+.hero-body,.hero.has-carousel+.hero-footer,.hero.has-carousel+.hero-head{z-index:10;overflow:hidden}.hero.has-carousel .hero-carousel{position:absolute;top:0;left:0;bottom:0;right:0;height:auto;border:none;margin:auto;padding:0;z-index:0}.hero.has-carousel .hero-carousel .slider{width:100%;max-width:100%;overflow:hidden;height:100%!important;max-height:100%;z-index:0}.hero.has-carousel .hero-carousel .slider .has-background{max-height:100%}.hero.has-carousel .hero-carousel .slider .has-background .is-background{-o-object-fit:cover;object-fit:cover;-o-object-position:center center;object-position:center center;height:100%;width:100%}.hero.has-carousel .hero-body{margin:0 3rem;z-index:10} \ No newline at end of file diff --git a/docs/static/css/bulma-slider.min.css b/docs/static/css/bulma-slider.min.css new file 
mode 100644 index 0000000..09b4aeb --- /dev/null +++ b/docs/static/css/bulma-slider.min.css @@ -0,0 +1 @@ +@-webkit-keyframes spinAround{from{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes spinAround{from{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}input[type=range].slider{-webkit-appearance:none;-moz-appearance:none;appearance:none;margin:1rem 0;background:0 0;touch-action:none}input[type=range].slider.is-fullwidth{display:block;width:100%}input[type=range].slider:focus{outline:0}input[type=range].slider:not([orient=vertical])::-webkit-slider-runnable-track{width:100%}input[type=range].slider:not([orient=vertical])::-moz-range-track{width:100%}input[type=range].slider:not([orient=vertical])::-ms-track{width:100%}input[type=range].slider:not([orient=vertical]).has-output+output,input[type=range].slider:not([orient=vertical]).has-output-tooltip+output{width:3rem;background:#4a4a4a;border-radius:4px;padding:.4rem .8rem;font-size:.75rem;line-height:.75rem;text-align:center;text-overflow:ellipsis;white-space:nowrap;color:#fff;overflow:hidden;pointer-events:none;z-index:200}input[type=range].slider:not([orient=vertical]).has-output-tooltip:disabled+output,input[type=range].slider:not([orient=vertical]).has-output:disabled+output{opacity:.5}input[type=range].slider:not([orient=vertical]).has-output{display:inline-block;vertical-align:middle;width:calc(100% - 
(4.2rem))}input[type=range].slider:not([orient=vertical]).has-output+output{display:inline-block;margin-left:.75rem;vertical-align:middle}input[type=range].slider:not([orient=vertical]).has-output-tooltip{display:block}input[type=range].slider:not([orient=vertical]).has-output-tooltip+output{position:absolute;left:0;top:-.1rem}input[type=range].slider[orient=vertical]{-webkit-appearance:slider-vertical;-moz-appearance:slider-vertical;appearance:slider-vertical;-webkit-writing-mode:bt-lr;-ms-writing-mode:bt-lr;writing-mode:bt-lr}input[type=range].slider[orient=vertical]::-webkit-slider-runnable-track{height:100%}input[type=range].slider[orient=vertical]::-moz-range-track{height:100%}input[type=range].slider[orient=vertical]::-ms-track{height:100%}input[type=range].slider::-webkit-slider-runnable-track{cursor:pointer;animate:.2s;box-shadow:0 0 0 #7a7a7a;background:#dbdbdb;border-radius:4px;border:0 solid #7a7a7a}input[type=range].slider::-moz-range-track{cursor:pointer;animate:.2s;box-shadow:0 0 0 #7a7a7a;background:#dbdbdb;border-radius:4px;border:0 solid #7a7a7a}input[type=range].slider::-ms-track{cursor:pointer;animate:.2s;box-shadow:0 0 0 #7a7a7a;background:#dbdbdb;border-radius:4px;border:0 solid #7a7a7a}input[type=range].slider::-ms-fill-lower{background:#dbdbdb;border-radius:4px}input[type=range].slider::-ms-fill-upper{background:#dbdbdb;border-radius:4px}input[type=range].slider::-webkit-slider-thumb{box-shadow:none;border:1px solid #b5b5b5;border-radius:4px;background:#fff;cursor:pointer}input[type=range].slider::-moz-range-thumb{box-shadow:none;border:1px solid #b5b5b5;border-radius:4px;background:#fff;cursor:pointer}input[type=range].slider::-ms-thumb{box-shadow:none;border:1px solid 
#b5b5b5;border-radius:4px;background:#fff;cursor:pointer}input[type=range].slider::-webkit-slider-thumb{-webkit-appearance:none;appearance:none}input[type=range].slider.is-circle::-webkit-slider-thumb{border-radius:290486px}input[type=range].slider.is-circle::-moz-range-thumb{border-radius:290486px}input[type=range].slider.is-circle::-ms-thumb{border-radius:290486px}input[type=range].slider:active::-webkit-slider-thumb{-webkit-transform:scale(1.25);transform:scale(1.25)}input[type=range].slider:active::-moz-range-thumb{transform:scale(1.25)}input[type=range].slider:active::-ms-thumb{transform:scale(1.25)}input[type=range].slider:disabled{opacity:.5;cursor:not-allowed}input[type=range].slider:disabled::-webkit-slider-thumb{cursor:not-allowed;-webkit-transform:scale(1);transform:scale(1)}input[type=range].slider:disabled::-moz-range-thumb{cursor:not-allowed;transform:scale(1)}input[type=range].slider:disabled::-ms-thumb{cursor:not-allowed;transform:scale(1)}input[type=range].slider:not([orient=vertical]){min-height:calc((1rem + 2px) * 
1.25)}input[type=range].slider:not([orient=vertical])::-webkit-slider-runnable-track{height:.5rem}input[type=range].slider:not([orient=vertical])::-moz-range-track{height:.5rem}input[type=range].slider:not([orient=vertical])::-ms-track{height:.5rem}input[type=range].slider[orient=vertical]::-webkit-slider-runnable-track{width:.5rem}input[type=range].slider[orient=vertical]::-moz-range-track{width:.5rem}input[type=range].slider[orient=vertical]::-ms-track{width:.5rem}input[type=range].slider::-webkit-slider-thumb{height:1rem;width:1rem}input[type=range].slider::-moz-range-thumb{height:1rem;width:1rem}input[type=range].slider::-ms-thumb{height:1rem;width:1rem}input[type=range].slider::-ms-thumb{margin-top:0}input[type=range].slider::-webkit-slider-thumb{margin-top:-.25rem}input[type=range].slider[orient=vertical]::-webkit-slider-thumb{margin-top:auto;margin-left:-.25rem}input[type=range].slider.is-small:not([orient=vertical]){min-height:calc((.75rem + 2px) * 1.25)}input[type=range].slider.is-small:not([orient=vertical])::-webkit-slider-runnable-track{height:.375rem}input[type=range].slider.is-small:not([orient=vertical])::-moz-range-track{height:.375rem}input[type=range].slider.is-small:not([orient=vertical])::-ms-track{height:.375rem}input[type=range].slider.is-small[orient=vertical]::-webkit-slider-runnable-track{width:.375rem}input[type=range].slider.is-small[orient=vertical]::-moz-range-track{width:.375rem}input[type=range].slider.is-small[orient=vertical]::-ms-track{width:.375rem}input[type=range].slider.is-small::-webkit-slider-thumb{height:.75rem;width:.75rem}input[type=range].slider.is-small::-moz-range-thumb{height:.75rem;width:.75rem}input[type=range].slider.is-small::-ms-thumb{height:.75rem;width:.75rem}input[type=range].slider.is-small::-ms-thumb{margin-top:0}input[type=range].slider.is-small::-webkit-slider-thumb{margin-top:-.1875rem}input[type=range].slider.is-small[orient=vertical]::-webkit-slider-thumb{margin-top:auto;margin-left:-.1875rem}input[type=r
ange].slider.is-medium:not([orient=vertical]){min-height:calc((1.25rem + 2px) * 1.25)}input[type=range].slider.is-medium:not([orient=vertical])::-webkit-slider-runnable-track{height:.625rem}input[type=range].slider.is-medium:not([orient=vertical])::-moz-range-track{height:.625rem}input[type=range].slider.is-medium:not([orient=vertical])::-ms-track{height:.625rem}input[type=range].slider.is-medium[orient=vertical]::-webkit-slider-runnable-track{width:.625rem}input[type=range].slider.is-medium[orient=vertical]::-moz-range-track{width:.625rem}input[type=range].slider.is-medium[orient=vertical]::-ms-track{width:.625rem}input[type=range].slider.is-medium::-webkit-slider-thumb{height:1.25rem;width:1.25rem}input[type=range].slider.is-medium::-moz-range-thumb{height:1.25rem;width:1.25rem}input[type=range].slider.is-medium::-ms-thumb{height:1.25rem;width:1.25rem}input[type=range].slider.is-medium::-ms-thumb{margin-top:0}input[type=range].slider.is-medium::-webkit-slider-thumb{margin-top:-.3125rem}input[type=range].slider.is-medium[orient=vertical]::-webkit-slider-thumb{margin-top:auto;margin-left:-.3125rem}input[type=range].slider.is-large:not([orient=vertical]){min-height:calc((1.5rem + 2px) * 
1.25)}input[type=range].slider.is-large:not([orient=vertical])::-webkit-slider-runnable-track{height:.75rem}input[type=range].slider.is-large:not([orient=vertical])::-moz-range-track{height:.75rem}input[type=range].slider.is-large:not([orient=vertical])::-ms-track{height:.75rem}input[type=range].slider.is-large[orient=vertical]::-webkit-slider-runnable-track{width:.75rem}input[type=range].slider.is-large[orient=vertical]::-moz-range-track{width:.75rem}input[type=range].slider.is-large[orient=vertical]::-ms-track{width:.75rem}input[type=range].slider.is-large::-webkit-slider-thumb{height:1.5rem;width:1.5rem}input[type=range].slider.is-large::-moz-range-thumb{height:1.5rem;width:1.5rem}input[type=range].slider.is-large::-ms-thumb{height:1.5rem;width:1.5rem}input[type=range].slider.is-large::-ms-thumb{margin-top:0}input[type=range].slider.is-large::-webkit-slider-thumb{margin-top:-.375rem}input[type=range].slider.is-large[orient=vertical]::-webkit-slider-thumb{margin-top:auto;margin-left:-.375rem}input[type=range].slider.is-white::-moz-range-track{background:#fff!important}input[type=range].slider.is-white::-webkit-slider-runnable-track{background:#fff!important}input[type=range].slider.is-white::-ms-track{background:#fff!important}input[type=range].slider.is-white::-ms-fill-lower{background:#fff}input[type=range].slider.is-white::-ms-fill-upper{background:#fff}input[type=range].slider.is-white .has-output-tooltip+output,input[type=range].slider.is-white.has-output+output{background-color:#fff;color:#0a0a0a}input[type=range].slider.is-black::-moz-range-track{background:#0a0a0a!important}input[type=range].slider.is-black::-webkit-slider-runnable-track{background:#0a0a0a!important}input[type=range].slider.is-black::-ms-track{background:#0a0a0a!important}input[type=range].slider.is-black::-ms-fill-lower{background:#0a0a0a}input[type=range].slider.is-black::-ms-fill-upper{background:#0a0a0a}input[type=range].slider.is-black 
.has-output-tooltip+output,input[type=range].slider.is-black.has-output+output{background-color:#0a0a0a;color:#fff}input[type=range].slider.is-light::-moz-range-track{background:#f5f5f5!important}input[type=range].slider.is-light::-webkit-slider-runnable-track{background:#f5f5f5!important}input[type=range].slider.is-light::-ms-track{background:#f5f5f5!important}input[type=range].slider.is-light::-ms-fill-lower{background:#f5f5f5}input[type=range].slider.is-light::-ms-fill-upper{background:#f5f5f5}input[type=range].slider.is-light .has-output-tooltip+output,input[type=range].slider.is-light.has-output+output{background-color:#f5f5f5;color:#363636}input[type=range].slider.is-dark::-moz-range-track{background:#363636!important}input[type=range].slider.is-dark::-webkit-slider-runnable-track{background:#363636!important}input[type=range].slider.is-dark::-ms-track{background:#363636!important}input[type=range].slider.is-dark::-ms-fill-lower{background:#363636}input[type=range].slider.is-dark::-ms-fill-upper{background:#363636}input[type=range].slider.is-dark .has-output-tooltip+output,input[type=range].slider.is-dark.has-output+output{background-color:#363636;color:#f5f5f5}input[type=range].slider.is-primary::-moz-range-track{background:#00d1b2!important}input[type=range].slider.is-primary::-webkit-slider-runnable-track{background:#00d1b2!important}input[type=range].slider.is-primary::-ms-track{background:#00d1b2!important}input[type=range].slider.is-primary::-ms-fill-lower{background:#00d1b2}input[type=range].slider.is-primary::-ms-fill-upper{background:#00d1b2}input[type=range].slider.is-primary 
.has-output-tooltip+output,input[type=range].slider.is-primary.has-output+output{background-color:#00d1b2;color:#fff}input[type=range].slider.is-link::-moz-range-track{background:#3273dc!important}input[type=range].slider.is-link::-webkit-slider-runnable-track{background:#3273dc!important}input[type=range].slider.is-link::-ms-track{background:#3273dc!important}input[type=range].slider.is-link::-ms-fill-lower{background:#3273dc}input[type=range].slider.is-link::-ms-fill-upper{background:#3273dc}input[type=range].slider.is-link .has-output-tooltip+output,input[type=range].slider.is-link.has-output+output{background-color:#3273dc;color:#fff}input[type=range].slider.is-info::-moz-range-track{background:#209cee!important}input[type=range].slider.is-info::-webkit-slider-runnable-track{background:#209cee!important}input[type=range].slider.is-info::-ms-track{background:#209cee!important}input[type=range].slider.is-info::-ms-fill-lower{background:#209cee}input[type=range].slider.is-info::-ms-fill-upper{background:#209cee}input[type=range].slider.is-info .has-output-tooltip+output,input[type=range].slider.is-info.has-output+output{background-color:#209cee;color:#fff}input[type=range].slider.is-success::-moz-range-track{background:#23d160!important}input[type=range].slider.is-success::-webkit-slider-runnable-track{background:#23d160!important}input[type=range].slider.is-success::-ms-track{background:#23d160!important}input[type=range].slider.is-success::-ms-fill-lower{background:#23d160}input[type=range].slider.is-success::-ms-fill-upper{background:#23d160}input[type=range].slider.is-success 
.has-output-tooltip+output,input[type=range].slider.is-success.has-output+output{background-color:#23d160;color:#fff}input[type=range].slider.is-warning::-moz-range-track{background:#ffdd57!important}input[type=range].slider.is-warning::-webkit-slider-runnable-track{background:#ffdd57!important}input[type=range].slider.is-warning::-ms-track{background:#ffdd57!important}input[type=range].slider.is-warning::-ms-fill-lower{background:#ffdd57}input[type=range].slider.is-warning::-ms-fill-upper{background:#ffdd57}input[type=range].slider.is-warning .has-output-tooltip+output,input[type=range].slider.is-warning.has-output+output{background-color:#ffdd57;color:rgba(0,0,0,.7)}input[type=range].slider.is-danger::-moz-range-track{background:#ff3860!important}input[type=range].slider.is-danger::-webkit-slider-runnable-track{background:#ff3860!important}input[type=range].slider.is-danger::-ms-track{background:#ff3860!important}input[type=range].slider.is-danger::-ms-fill-lower{background:#ff3860}input[type=range].slider.is-danger::-ms-fill-upper{background:#ff3860}input[type=range].slider.is-danger .has-output-tooltip+output,input[type=range].slider.is-danger.has-output+output{background-color:#ff3860;color:#fff} \ No newline at end of file diff --git a/docs/static/css/bulma.css.map.txt b/docs/static/css/bulma.css.map.txt new file mode 100644 index 0000000..ed3a9d3 --- /dev/null +++ b/docs/static/css/bulma.css.map.txt @@ -0,0 +1 @@ 
+{"version":3,"sources":["../bulma.sass","../sass/utilities/_all.sass","../sass/utilities/animations.sass","bulma.css","../sass/utilities/mixins.sass","../sass/utilities/initial-variables.sass","../sass/utilities/controls.sass","../sass/base/_all.sass","../sass/base/minireset.sass","../sass/base/generic.sass","../sass/utilities/derived-variables.sass","../sass/elements/_all.sass","../sass/elements/box.sass","../sass/elements/button.sass","../sass/utilities/functions.sass","../sass/elements/container.sass","../sass/elements/content.sass","../sass/elements/icon.sass","../sass/elements/image.sass","../sass/elements/notification.sass","../sass/elements/progress.sass","../sass/elements/table.sass","../sass/elements/tag.sass","../sass/elements/title.sass","../sass/elements/other.sass","../sass/form/_all.sass","../sass/form/shared.sass","../sass/form/input-textarea.sass","../sass/form/checkbox-radio.sass","../sass/form/select.sass","../sass/form/file.sass","../sass/form/tools.sass","../sass/components/_all.sass","../sass/components/breadcrumb.sass","../sass/components/card.sass","../sass/components/dropdown.sass","../sass/components/level.sass","../sass/components/media.sass","../sass/components/menu.sass","../sass/components/message.sass","../sass/components/modal.sass","../sass/components/navbar.sass","../sass/components/pagination.sass","../sass/components/panel.sass","../sass/components/tabs.sass","../sass/grid/_all.sass","../sass/grid/columns.sass","../sass/grid/tiles.sass","../sass/helpers/_all.sass","../sass/helpers/color.sass","../sass/helpers/flexbox.sass","../sass/helpers/float.sass","../sass/helpers/other.sass","../sass/helpers/overflow.sass","../sass/helpers/position.sass","../sass/helpers/spacing.sass","../sass/helpers/typography.sass","../sass/helpers/visibility.sass","../sass/layout/_all.sass","../sass/layout/hero.sass","../sass/layout/section.sass","../sass/layout/footer.sass"],"names":[],"mappings":"AACA,6DAAA;ACDA,oBAAA;ACAA;EACE;IACE,uBAAuB;ECGzB;EDFA;IA
CE,yBAAyB;ECI3B;AACF;ADTA;EACE;IACE,uBAAuB;ECGzB;EDFA;IACE,yBAAyB;ECI3B;AACF;;AC0JA;;;;EANE,2BAA2B;EAC3B,yBAAyB;EACzB,sBAAsB;EACtB,qBAAqB;EACrB,iBAAiB;AD7InB;;ACkKA;EAfE,6BAD8B;EAE9B,kBAAkB;EAClB,eAAe;EACf,aAAa;EACb,YAAY;EACZ,cAAc;EACd,eAAe;EACf,qBAAqB;EACrB,oBAAoB;EACpB,kBAAkB;EAClB,QAAQ;EACR,yBAAyB;EACzB,wBAAwB;EACxB,cAAc;AD/IhB;;ACqJE;;EACE,qBC3IkB;AFNtB;;ACwNA;EAhEE,qBAAqB;EACrB,wBAAwB;EACxB,uCClM2B;EDmM3B,YAAY;EACZ,uBC/HuB;EDgIvB,eAAe;EACf,oBAAoB;EACpB,qBAAqB;EACrB,YAAY;EACZ,cAAc;EACd,YAAY;EACZ,YAAY;EACZ,gBAAgB;EAChB,eAAe;EACf,gBAAgB;EAChB,eAAe;EACf,aAAa;EACb,kBAAkB;EAClB,mBAAmB;EACnB,WAAW;ADpJb;;ACqJE;EAEE,uBCzM2B;ED0M3B,WAAW;EACX,cAAc;EACd,SAAS;EACT,kBAAkB;EAClB,QAAQ;EACR,0DAA0D;EAC1D,+BAA+B;ADnJnC;;ACoJE;EACE,WAAW;EACX,UAAU;ADjJd;;ACkJE;EACE,WAAW;EACX,UAAU;AD/Id;;ACgJE;EAEE,uCCtOyB;AFwF7B;;AC+IE;EACE,uCCxOyB;AF4F7B;;AC8IE;EACE,YAAY;EACZ,gBAAgB;EAChB,eAAe;EACf,gBAAgB;EAChB,eAAe;EACf,WAAW;AD3If;;AC4IE;EACE,YAAY;EACZ,gBAAgB;EAChB,eAAe;EACf,gBAAgB;EAChB,eAAe;EACf,WAAW;ADzIf;;AC0IE;EACE,YAAY;EACZ,gBAAgB;EAChB,eAAe;EACf,gBAAgB;EAChB,eAAe;EACf,WAAW;ADvIf;;ACwJA;EAXE,mDAA2C;UAA3C,2CAA2C;EAC3C,yBC7P4B;ED8P5B,uBCjMuB;EDkMvB,+BAA+B;EAC/B,6BAA6B;EAC7B,WAAW;EACX,cAAc;EACd,WAAW;EACX,kBAAkB;EAClB,UAAU;ADzIZ;;ACqJA;;;;;;;;;;;;;;;;;EANE,SADuB;EAEvB,OAFuB;EAGvB,kBAAkB;EAClB,QAJuB;EAKvB,MALuB;ADtHzB;;AGvHA;;;;;EA3BE,qBAAqB;EACrB,wBAAwB;EACxB,mBAAmB;EACnB,6BAA+C;EAC/C,kBDqDU;ECpDV,gBAAgB;EAChB,oBAAoB;EACpB,eDkBW;ECjBX,aAfoB;EAgBpB,2BAA2B;EAC3B,gBAhBuB;EAiBvB,iCAf+D;EAgB/D,gCAfkE;EAgBlE,iCAhBkE;EAiBlE,8BAlB+D;EAmB/D,kBAAkB;EAClB,mBAAmB;AH0JrB;;AGxJE;;;;;;;;;;;;;;;;;EAIE,aAAa;AHwKjB;;AGvKE;;;;;;;;;;;;;;;;EAEE,mBAAmB;AHwLvB;;AI7NA,eAAA;ACAA,0EAAA;AAEA;;;;;;;;;;;;;;;;;;;;;;;EAuBE,SAAS;EACT,UAAU;ALgOZ;;AK7NA;;;;;;EAME,eAAe;EACf,mBAAmB;ALgOrB;;AK7NA;EACE,gBAAgB;ALgOlB;;AK7NA;;;;EAIE,SAAS;ALgOX;;AK7NA;EACE,sBAAsB;ALgOxB;;AK9NA;EAII,mBAAmB;AL8NvB;;AK3NA;;EAEE,YAAY;EACZ,eAAe;AL8NjB;;AK3NA;EACE,SAAS;AL8NX;;AK3NA;EACE,yBAAyB;EACzB,iBAAiB;AL8NnB;;AK5NA;;EAEE,UAAU;AL+NZ;;AKjOA;;EAII,mBAAmB;
ALkOvB;;AK9PA;EClBE,uBJjB6B;EIkB7B,eAhCc;EAiCd,kCAAkC;EAClC,mCAAmC;EACnC,gBAlCoB;EAmCpB,kBAhCsB;EAiCtB,kBAhCsB;EAiCtB,kCApCiC;EAqCjC,8BAAsB;KAAtB,2BAAsB;MAAtB,0BAAsB;UAAtB,sBAAsB;ANoRxB;;AMlRA;;;;;;;EAOE,cAAc;ANqRhB;;AMnRA;;;;;;EAME,oLJ7ByL;AFmT3L;;AMpRA;;EAEE,6BAA6B;EAC7B,4BAA4B;EAC5B,sBJlC0B;AFyT5B;;AMrRA;EACE,cJ3D4B;EI4D5B,cA1DkB;EA2DlB,gBJ3BiB;EI4BjB,gBA1DoB;ANkVtB;;AMpRA;EACE,cJpDgC;EIqDhC,eAAe;EACf,qBAAqB;ANuRvB;;AM1RA;EAKI,mBAAmB;ANyRvB;;AM9RA;EAOI,cJ1E0B;AFqW9B;;AMzRA;EACE,4BJtE4B;EIuE5B,cCpBsB;EDqBtB,kBArEiB;EAsEjB,mBAvEkB;EAwElB,4BAzEgC;ANqWlC;;AM1RA;EACE,4BJ7E4B;EI8E5B,YAAY;EACZ,cAAc;EACd,WAxEa;EAyEb,gBAxEkB;ANqWpB;;AM3RA;EACE,YAAY;EACZ,eAAe;AN8RjB;;AM5RA;;EAEE,wBAAwB;AN+R1B;;AM7RA;EACE,kBAvFuB;ANuXzB;;AM9RA;EACE,mBAAmB;EACnB,oBAAoB;ANiStB;;AM/RA;EACE,cJ1G4B;EI2G5B,gBJrEe;AFuWjB;;AM9RA;EACE,YAAY;ANiSd;;AM/RA;EL1DE,iCAAiC;EK4DjC,4BJ7G4B;EI8G5B,cJpH4B;EIqH5B,kBAjGqB;EAkGrB,gBAAgB;EAChB,uBAlG0B;EAmG1B,gBAAgB;EAChB,iBAAiB;ANkSnB;;AM1SA;EAUI,6BAA6B;EAC7B,mBAAmB;EACnB,cAvGoB;EAwGpB,UAAU;ANoSd;;AMlSA;;EAGI,mBAAmB;ANoSvB;;AMvSA;;EAKM,mBAAmB;ANuSzB;;AM5SA;EAOI,cJxI0B;AFib9B;;AQvbA,mBAAA;ACSA;EAEE,uBPI6B;EOH7B,kBP0DgB;EOzDhB,0FPX2B;EOY3B,cPP4B;EOQ5B,cAAc;EACd,gBAZmB;AT6brB;;AS/aA;EAGI,yEPC8B;AF+alC;;ASnbA;EAKI,oEPD8B;AFmblC;;AUzZA;EAGE,uBRpC6B;EQqC7B,qBR1C4B;EQ2C5B,iBPlDwB;EOmDxB,cRhD4B;EQiD5B,eAAe;EAGf,uBAAuB;EACvB,iCApD6D;EAqD7D,iBApD6B;EAqD7B,kBArD6B;EAsD7B,8BAvD6D;EAwD7D,kBAAkB;EAClB,mBAAmB;AVwZrB;;AUxaA;EAkBI,cAAc;AV0ZlB;;AU5aA;EAwBM,aAAa;EACb,YAAY;AVwZlB;;AUjbA;ETgGI,+BSrEwG;ETqExG,oBSpEgE;AV0ZpE;;AUtbA;ETgGI,mBSlEgE;ETkEhE,gCSjEwG;AV4Z5G;;AU3bA;EAiCM,+BAAmF;EACnF,gCAAoF;AV8Z1F;;AUhcA;EAsCI,qBR7E0B;EQ8E1B,cRjF0B;AF+e9B;;AUrcA;EA0CI,qBRpE8B;EQqE9B,cRrF0B;AFof9B;;AU1cA;EA6CM,kDRvE4B;AFwelC;;AU9cA;EAgDI,qBRzF0B;EQ0F1B,cR3F0B;AF6f9B;;AUndA;EAoDI,6BAA6B;EAC7B,yBAAyB;EACzB,cR/F0B;EQgG1B,0BAjF8B;AVoflC;;AU1dA;EA4DM,4BR/FwB;EQgGxB,cRvGwB;AFygB9B;;AU/dA;EAgEM,yBCH2B;EDI3B,cR3GwB;AF8gB9B;;AUpeA;;EAoEM,6BAA6B;EAC7B,yBAAyB;EACzB,gBAAgB;AVqatB;;AU3eA;EA2EM,uBR5GyB;EQ6GzB,
yBAAyB;EACzB,cR3HuB;AF+hB7B;;AUjfA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,cRhIqB;AFqiB7B;;AUvfA;EAqFQ,yBAAyB;EACzB,cRpIqB;AF0iB7B;;AU5fA;EAwFU,mDRzHqB;AFiiB/B;;AUhgBA;EA2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,cR3IqB;AFojB7B;;AUtgBA;;EAgGQ,uBRjIuB;EQkIvB,yBAAyB;EACzB,gBAAgB;AV2axB;;AU7gBA;EAoGQ,yBRlJqB;EQmJrB,YRtIuB;AFmjB/B;;AUlhBA;EAwGU,uBC3CuB;AXydjC;;AUthBA;;EA2GU,yBRzJmB;EQ0JnB,yBAAyB;EACzB,gBAAgB;EAChB,YR/IqB;AF+jB/B;;AU9hBA;EAiHU,gEAA4E;AVibtF;;AUliBA;EAmHQ,6BAA6B;EAC7B,mBRrJuB;EQsJvB,YRtJuB;AFykB/B;;AUxiBA;EA0HU,uBR3JqB;EQ4JrB,mBR5JqB;EQ6JrB,cR1KmB;AF4lB7B;;AU9iBA;EA+HY,4DAA8D;AVmb1E;;AUljBA;EAqIc,gEAA4E;AVib1F;;AUtjBA;;EAwIU,6BAA6B;EAC7B,mBR1KqB;EQ2KrB,gBAAgB;EAChB,YR5KqB;AF+lB/B;;AU9jBA;EA6IQ,6BAA6B;EAC7B,qBR5LqB;EQ6LrB,cR7LqB;AFknB7B;;AUpkBA;EAoJU,yBRlMmB;EQmMnB,YRtLqB;AF0mB/B;;AUzkBA;EA4Jc,4DAA8D;AVib5E;;AU7kBA;;EA+JU,6BAA6B;EAC7B,qBR9MmB;EQ+MnB,gBAAgB;EAChB,cRhNmB;AFmoB7B;;AUrlBA;EA2EM,yBRzHuB;EQ0HvB,yBAAyB;EACzB,YR9GyB;AF4nB/B;;AU3lBA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,YRnHuB;AFkoB/B;;AUjmBA;EAqFQ,yBAAyB;EACzB,YRvHuB;AFuoB/B;;AUtmBA;EAwFU,gDRtImB;AFwpB7B;;AU1mBA;EA2FQ,uBC9ByB;ED+BzB,yBAAyB;EACzB,YR9HuB;AFipB/B;;AUhnBA;;EAgGQ,yBR9IqB;EQ+IrB,yBAAyB;EACzB,gBAAgB;AVqhBxB;;AUvnBA;EAoGQ,uBRrIuB;EQsIvB,cRnJqB;AF0qB7B;;AU5nBA;EAwGU,yBC3CuB;AXmkBjC;;AUhoBA;;EA2GU,uBR5IqB;EQ6IrB,yBAAyB;EACzB,gBAAgB;EAChB,cR5JmB;AFsrB7B;;AUxoBA;EAiHU,4DAA4E;AV2hBtF;;AU5oBA;EAmHQ,6BAA6B;EAC7B,qBRlKqB;EQmKrB,cRnKqB;AFgsB7B;;AUlpBA;EA0HU,yBRxKmB;EQyKnB,qBRzKmB;EQ0KnB,YR7JqB;AFyrB/B;;AUxpBA;EA+HY,gEAA8D;AV6hB1E;;AU5pBA;EAqIc,4DAA4E;AV2hB1F;;AUhqBA;;EAwIU,6BAA6B;EAC7B,qBRvLmB;EQwLnB,gBAAgB;EAChB,cRzLmB;AFstB7B;;AUxqBA;EA6IQ,6BAA6B;EAC7B,mBR/KuB;EQgLvB,YRhLuB;AF+sB/B;;AU9qBA;EAoJU,uBRrLqB;EQsLrB,cRnMmB;AFiuB7B;;AUnrBA;EA4Jc,gEAA8D;AV2hB5E;;AUvrBA;;EA+JU,6BAA6B;EAC7B,mBRjMqB;EQkMrB,gBAAgB;EAChB,YRnMqB;AFguB/B;;AU/rBA;EA2EM,4BR9GwB;EQ+GxB,yBAAyB;EACzB,yBC7Ce;AXqqBrB;;AUrsBA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,yBClDa;AX2qBrB;;AU3sBA;EAqFQ,yBAAyB;EACzB,yBCtDa;AXgrBrB;;AUhtBA;EAwFU,mDR3HoB;AFuvB9B;;AUptBA;EA
2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,yBC7Da;AX0rBrB;;AU1tBA;;EAgGQ,4BRnIsB;EQoItB,yBAAyB;EACzB,gBAAgB;AV+nBxB;;AUjuBA;EAoGQ,oCCpEa;EDqEb,iBRxIsB;AFywB9B;;AUtuBA;EAwGU,oCC3CuB;AX6qBjC;;AU1uBA;;EA2GU,oCC3EW;ED4EX,yBAAyB;EACzB,gBAAgB;EAChB,iBRjJoB;AFqxB9B;;AUlvBA;EAiHU,sFAA4E;AVqoBtF;;AUtvBA;EAmHQ,6BAA6B;EAC7B,wBRvJsB;EQwJtB,iBRxJsB;AF+xB9B;;AU5vBA;EA0HU,4BR7JoB;EQ8JpB,wBR9JoB;EQ+JpB,yBC5FW;AXkuBrB;;AUlwBA;EA+HY,sEAA8D;AVuoB1E;;AUtwBA;EAqIc,sFAA4E;AVqoB1F;;AU1wBA;;EAwIU,6BAA6B;EAC7B,wBR5KoB;EQ6KpB,gBAAgB;EAChB,iBR9KoB;AFqzB9B;;AUlxBA;EA6IQ,6BAA6B;EAC7B,gCC9Ga;ED+Gb,yBC/Ga;AXwvBrB;;AUxxBA;EAoJU,oCCpHW;EDqHX,iBRxLoB;AFg0B9B;;AU7xBA;EA4Jc,sEAA8D;AVqoB5E;;AUjyBA;;EA+JU,6BAA6B;EAC7B,gCChIW;EDiIX,gBAAgB;EAChB,yBClIW;AXywBrB;;AUzyBA;EA2EM,yBRrHwB;EQsHxB,yBAAyB;EACzB,WC3CU;AX6wBhB;;AU/yBA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,WChDQ;AXmxBhB;;AUrzBA;EAqFQ,yBAAyB;EACzB,WCpDQ;AXwxBhB;;AU1zBA;EAwFU,gDRlIoB;AFw2B9B;;AU9zBA;EA2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,WC3DQ;AXkyBhB;;AUp0BA;;EAgGQ,yBR1IsB;EQ2ItB,yBAAyB;EACzB,gBAAgB;AVyuBxB;;AU30BA;EAoGQ,sBClEQ;EDmER,cR/IsB;AF03B9B;;AUh1BA;EAwGU,yBC3CuB;AXuxBjC;;AUp1BA;;EA2GU,sBCzEM;ED0EN,yBAAyB;EACzB,gBAAgB;EAChB,cRxJoB;AFs4B9B;;AU51BA;EAiHU,0DAA4E;AV+uBtF;;AUh2BA;EAmHQ,6BAA6B;EAC7B,qBR9JsB;EQ+JtB,cR/JsB;AFg5B9B;;AUt2BA;EA0HU,yBRpKoB;EQqKpB,qBRrKoB;EQsKpB,WC1FM;AX00BhB;;AU52BA;EA+HY,gEAA8D;AVivB1E;;AUh3BA;EAqIc,0DAA4E;AV+uB1F;;AUp3BA;;EAwIU,6BAA6B;EAC7B,qBRnLoB;EQoLpB,gBAAgB;EAChB,cRrLoB;AFs6B9B;;AU53BA;EA6IQ,6BAA6B;EAC7B,kBC5GQ;ED6GR,WC7GQ;AXg2BhB;;AUl4BA;EAoJU,sBClHM;EDmHN,cR/LoB;AFi7B9B;;AUv4BA;EA4Jc,gEAA8D;AV+uB5E;;AU34BA;;EA+JU,6BAA6B;EAC7B,kBC9HM;ED+HN,gBAAgB;EAChB,WChIM;AXi3BhB;;AUn5BA;EA2EM,yBRvG4B;EQwG5B,yBAAyB;EACzB,WC3CU;AXu3BhB;;AUz5BA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,WChDQ;AX63BhB;;AU/5BA;EAqFQ,yBAAyB;EACzB,WCpDQ;AXk4BhB;;AUp6BA;EAwFU,iDRpHwB;AFo8BlC;;AUx6BA;EA2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,WC3DQ;AX44BhB;;AU96BA;;EAgGQ,yBR5H0B;EQ6H1B,yBAAyB;EACzB,gBAAgB;AVm1BxB;;AUr7BA;EAoGQ,sBClEQ;EDmER,cRjI0B;AFs9BlC;;AU17BA;EAwGU,yBC3CuB;AXi4BjC;;AU97BA
;;EA2GU,sBCzEM;ED0EN,yBAAyB;EACzB,gBAAgB;EAChB,cR1IwB;AFk+BlC;;AUt8BA;EAiHU,0DAA4E;AVy1BtF;;AU18BA;EAmHQ,6BAA6B;EAC7B,qBRhJ0B;EQiJ1B,cRjJ0B;AF4+BlC;;AUh9BA;EA0HU,yBRtJwB;EQuJxB,qBRvJwB;EQwJxB,WC1FM;AXo7BhB;;AUt9BA;EA+HY,gEAA8D;AV21B1E;;AU19BA;EAqIc,0DAA4E;AVy1B1F;;AU99BA;;EAwIU,6BAA6B;EAC7B,qBRrKwB;EQsKxB,gBAAgB;EAChB,cRvKwB;AFkgClC;;AUt+BA;EA6IQ,6BAA6B;EAC7B,kBC5GQ;ED6GR,WC7GQ;AX08BhB;;AU5+BA;EAoJU,sBClHM;EDmHN,cRjLwB;AF6gClC;;AUj/BA;EA4Jc,gEAA8D;AVy1B5E;;AUr/BA;;EA+JU,6BAA6B;EAC7B,kBC9HM;ED+HN,gBAAgB;EAChB,WChIM;AX29BhB;;AU7/BA;EAwKU,yBC/HsC;EDgItC,cCvH2D;AXg9BrE;;AUlgCA;EA4KY,yBC/GqB;EDgHrB,yBAAyB;EACzB,cC5HyD;AXs9BrE;;AUxgCA;EAiLY,yBCpHqB;EDqHrB,yBAAyB;EACzB,cCjIyD;AX49BrE;;AU9gCA;EA2EM,yBRrG4B;EQsG5B,yBAAyB;EACzB,WC3CU;AXk/BhB;;AUphCA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,WChDQ;AXw/BhB;;AU1hCA;EAqFQ,yBAAyB;EACzB,WCpDQ;AX6/BhB;;AU/hCA;EAwFU,kDRlHwB;AF6jClC;;AUniCA;EA2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,WC3DQ;AXugChB;;AUziCA;;EAgGQ,yBR1H0B;EQ2H1B,yBAAyB;EACzB,gBAAgB;AV88BxB;;AUhjCA;EAoGQ,sBClEQ;EDmER,cR/H0B;AF+kClC;;AUrjCA;EAwGU,yBC3CuB;AX4/BjC;;AUzjCA;;EA2GU,sBCzEM;ED0EN,yBAAyB;EACzB,gBAAgB;EAChB,cRxIwB;AF2lClC;;AUjkCA;EAiHU,0DAA4E;AVo9BtF;;AUrkCA;EAmHQ,6BAA6B;EAC7B,qBR9I0B;EQ+I1B,cR/I0B;AFqmClC;;AU3kCA;EA0HU,yBRpJwB;EQqJxB,qBRrJwB;EQsJxB,WC1FM;AX+iChB;;AUjlCA;EA+HY,gEAA8D;AVs9B1E;;AUrlCA;EAqIc,0DAA4E;AVo9B1F;;AUzlCA;;EAwIU,6BAA6B;EAC7B,qBRnKwB;EQoKxB,gBAAgB;EAChB,cRrKwB;AF2nClC;;AUjmCA;EA6IQ,6BAA6B;EAC7B,kBC5GQ;ED6GR,WC7GQ;AXqkChB;;AUvmCA;EAoJU,sBClHM;EDmHN,cR/KwB;AFsoClC;;AU5mCA;EA4Jc,gEAA8D;AVo9B5E;;AUhnCA;;EA+JU,6BAA6B;EAC7B,kBC9HM;ED+HN,gBAAgB;EAChB,WChIM;AXslChB;;AUxnCA;EAwKU,yBC/HsC;EDgItC,cCvH2D;AX2kCrE;;AU7nCA;EA4KY,yBC/GqB;EDgHrB,yBAAyB;EACzB,cC5HyD;AXilCrE;;AUnoCA;EAiLY,yBCpHqB;EDqHrB,yBAAyB;EACzB,cCjIyD;AXulCrE;;AUzoCA;EA2EM,yBRtG4B;EQuG5B,yBAAyB;EACzB,WC3CU;AX6mChB;;AU/oCA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,WChDQ;AXmnChB;;AUrpCA;EAqFQ,yBAAyB;EACzB,WCpDQ;AXwnChB;;AU1pCA;EAwFU,kDRnHwB;AFyrClC;;AU9pCA;EA2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,WC3DQ;AXkoChB;;AUpqCA;;EAgG
Q,yBR3H0B;EQ4H1B,yBAAyB;EACzB,gBAAgB;AVykCxB;;AU3qCA;EAoGQ,sBClEQ;EDmER,cRhI0B;AF2sClC;;AUhrCA;EAwGU,yBC3CuB;AXunCjC;;AUprCA;;EA2GU,sBCzEM;ED0EN,yBAAyB;EACzB,gBAAgB;EAChB,cRzIwB;AFutClC;;AU5rCA;EAiHU,0DAA4E;AV+kCtF;;AUhsCA;EAmHQ,6BAA6B;EAC7B,qBR/I0B;EQgJ1B,cRhJ0B;AFiuClC;;AUtsCA;EA0HU,yBRrJwB;EQsJxB,qBRtJwB;EQuJxB,WC1FM;AX0qChB;;AU5sCA;EA+HY,gEAA8D;AVilC1E;;AUhtCA;EAqIc,0DAA4E;AV+kC1F;;AUptCA;;EAwIU,6BAA6B;EAC7B,qBRpKwB;EQqKxB,gBAAgB;EAChB,cRtKwB;AFuvClC;;AU5tCA;EA6IQ,6BAA6B;EAC7B,kBC5GQ;ED6GR,WC7GQ;AXgsChB;;AUluCA;EAoJU,sBClHM;EDmHN,cRhLwB;AFkwClC;;AUvuCA;EA4Jc,gEAA8D;AV+kC5E;;AU3uCA;;EA+JU,6BAA6B;EAC7B,kBC9HM;ED+HN,gBAAgB;EAChB,WChIM;AXitChB;;AUnvCA;EAwKU,yBC/HsC;EDgItC,cCvH2D;AXssCrE;;AUxvCA;EA4KY,yBC/GqB;EDgHrB,yBAAyB;EACzB,cC5HyD;AX4sCrE;;AU9vCA;EAiLY,yBCpHqB;EDqHrB,yBAAyB;EACzB,cCjIyD;AXktCrE;;AUpwCA;EA2EM,yBRxG4B;EQyG5B,yBAAyB;EACzB,WC3CU;AXwuChB;;AU1wCA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,WChDQ;AX8uChB;;AUhxCA;EAqFQ,yBAAyB;EACzB,WCpDQ;AXmvChB;;AUrxCA;EAwFU,kDRrHwB;AFszClC;;AUzxCA;EA2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,WC3DQ;AX6vChB;;AU/xCA;;EAgGQ,yBR7H0B;EQ8H1B,yBAAyB;EACzB,gBAAgB;AVosCxB;;AUtyCA;EAoGQ,sBClEQ;EDmER,cRlI0B;AFw0ClC;;AU3yCA;EAwGU,yBC3CuB;AXkvCjC;;AU/yCA;;EA2GU,sBCzEM;ED0EN,yBAAyB;EACzB,gBAAgB;EAChB,cR3IwB;AFo1ClC;;AUvzCA;EAiHU,0DAA4E;AV0sCtF;;AU3zCA;EAmHQ,6BAA6B;EAC7B,qBRjJ0B;EQkJ1B,cRlJ0B;AF81ClC;;AUj0CA;EA0HU,yBRvJwB;EQwJxB,qBRxJwB;EQyJxB,WC1FM;AXqyChB;;AUv0CA;EA+HY,gEAA8D;AV4sC1E;;AU30CA;EAqIc,0DAA4E;AV0sC1F;;AU/0CA;;EAwIU,6BAA6B;EAC7B,qBRtKwB;EQuKxB,gBAAgB;EAChB,cRxKwB;AFo3ClC;;AUv1CA;EA6IQ,6BAA6B;EAC7B,kBC5GQ;ED6GR,WC7GQ;AX2zChB;;AU71CA;EAoJU,sBClHM;EDmHN,cRlLwB;AF+3ClC;;AUl2CA;EA4Jc,gEAA8D;AV0sC5E;;AUt2CA;;EA+JU,6BAA6B;EAC7B,kBC9HM;ED+HN,gBAAgB;EAChB,WChIM;AX40ChB;;AU92CA;EAwKU,yBC/HsC;EDgItC,cCvH2D;AXi0CrE;;AUn3CA;EA4KY,yBC/GqB;EDgHrB,yBAAyB;EACzB,cC5HyD;AXu0CrE;;AUz3CA;EAiLY,yBCpHqB;EDqHrB,yBAAyB;EACzB,cCjIyD;AX60CrE;;AU/3CA;EA2EM,yBRzG4B;EQ0G5B,yBAAyB;EACzB,yBC7Ce;AXq2CrB;;AUr4CA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,yBClDa;AX22CrB;;AU34CA;EAqFQ,
yBAAyB;EACzB,yBCtDa;AXg3CrB;;AUh5CA;EAwFU,kDRtHwB;AFk7ClC;;AUp5CA;EA2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,yBC7Da;AX03CrB;;AU15CA;;EAgGQ,yBR9H0B;EQ+H1B,yBAAyB;EACzB,gBAAgB;AV+zCxB;;AUj6CA;EAoGQ,oCCpEa;EDqEb,cRnI0B;AFo8ClC;;AUt6CA;EAwGU,oCC3CuB;AX62CjC;;AU16CA;;EA2GU,oCC3EW;ED4EX,yBAAyB;EACzB,gBAAgB;EAChB,cR5IwB;AFg9ClC;;AUl7CA;EAiHU,sFAA4E;AVq0CtF;;AUt7CA;EAmHQ,6BAA6B;EAC7B,qBRlJ0B;EQmJ1B,cRnJ0B;AF09ClC;;AU57CA;EA0HU,yBRxJwB;EQyJxB,qBRzJwB;EQ0JxB,yBC5FW;AXk6CrB;;AUl8CA;EA+HY,gEAA8D;AVu0C1E;;AUt8CA;EAqIc,sFAA4E;AVq0C1F;;AU18CA;;EAwIU,6BAA6B;EAC7B,qBRvKwB;EQwKxB,gBAAgB;EAChB,cRzKwB;AFg/ClC;;AUl9CA;EA6IQ,6BAA6B;EAC7B,gCC9Ga;ED+Gb,yBC/Ga;AXw7CrB;;AUx9CA;EAoJU,oCCpHW;EDqHX,cRnLwB;AF2/ClC;;AU79CA;EA4Jc,gEAA8D;AVq0C5E;;AUj+CA;;EA+JU,6BAA6B;EAC7B,gCChIW;EDiIX,gBAAgB;EAChB,yBClIW;AXy8CrB;;AUz+CA;EAwKU,yBC/HsC;EDgItC,cCvH2D;AX47CrE;;AU9+CA;EA4KY,yBC/GqB;EDgHrB,yBAAyB;EACzB,cC5HyD;AXk8CrE;;AUp/CA;EAiLY,yBCpHqB;EDqHrB,yBAAyB;EACzB,cCjIyD;AXw8CrE;;AU1/CA;EA2EM,yBRnG2B;EQoG3B,yBAAyB;EACzB,WC3CU;AX89ChB;;AUhgDA;EAgFQ,yBCnByB;EDoBzB,yBAAyB;EACzB,WChDQ;AXo+ChB;;AUtgDA;EAqFQ,yBAAyB;EACzB,WCpDQ;AXy+ChB;;AU3gDA;EAwFU,kDRhHuB;AFuiDjC;;AU/gDA;EA2FQ,yBC9ByB;ED+BzB,yBAAyB;EACzB,WC3DQ;AXm/ChB;;AUrhDA;;EAgGQ,yBRxHyB;EQyHzB,yBAAyB;EACzB,gBAAgB;AV07CxB;;AU5hDA;EAoGQ,sBClEQ;EDmER,cR7HyB;AFyjDjC;;AUjiDA;EAwGU,yBC3CuB;AXw+CjC;;AUriDA;;EA2GU,sBCzEM;ED0EN,yBAAyB;EACzB,gBAAgB;EAChB,cRtIuB;AFqkDjC;;AU7iDA;EAiHU,0DAA4E;AVg8CtF;;AUjjDA;EAmHQ,6BAA6B;EAC7B,qBR5IyB;EQ6IzB,cR7IyB;AF+kDjC;;AUvjDA;EA0HU,yBRlJuB;EQmJvB,qBRnJuB;EQoJvB,WC1FM;AX2hDhB;;AU7jDA;EA+HY,gEAA8D;AVk8C1E;;AUjkDA;EAqIc,0DAA4E;AVg8C1F;;AUrkDA;;EAwIU,6BAA6B;EAC7B,qBRjKuB;EQkKvB,gBAAgB;EAChB,cRnKuB;AFqmDjC;;AU7kDA;EA6IQ,6BAA6B;EAC7B,kBC5GQ;ED6GR,WC7GQ;AXijDhB;;AUnlDA;EAoJU,sBClHM;EDmHN,cR7KuB;AFgnDjC;;AUxlDA;EA4Jc,gEAA8D;AVg8C5E;;AU5lDA;;EA+JU,6BAA6B;EAC7B,kBC9HM;ED+HN,gBAAgB;EAChB,WChIM;AXkkDhB;;AUpmDA;EAwKU,yBC/HsC;EDgItC,cCvH2D;AXujDrE;;AUzmDA;EA4KY,yBC/GqB;EDgHrB,yBAAyB;EACzB,cC5HyD;AX6jDrE;;AU/mDA;EAiLY,yBCpHqB;EDqHrB,yBAAyB;EACzB,cCjIyD;AXmkD
rE;;AUrnDA;EATE,kBR6BgB;EQ5BhB,kBRFc;AFooDhB;;AU1nDA;EANE,eRLW;AFyoDb;;AU9nDA;EAJE,kBRRc;AF8oDhB;;AUloDA;EAFE,iBRXa;AFmpDf;;AUtoDA;;EAgMI,uBRjO2B;EQkO3B,qBRvO0B;EQwO1B,gBAtNyB;EAuNzB,YAtNyB;AViqD7B;;AU9oDA;EAqMI,aAAa;EACb,WAAW;AV68Cf;;AUnpDA;EAwMI,6BAA6B;EAC7B,oBAAoB;AV+8CxB;;AUxpDA;ETvCE,kBAAkB;EAKhB,2BAAiC;EACjC,0BAAgC;ES8O9B,6BAA6B;AVk9CnC;;AU/pDA;EA+MI,4BRlP0B;EQmP1B,qBRtP0B;EQuP1B,cRzP0B;EQ0P1B,gBAAgB;EAChB,oBAAoB;AVo9CxB;;AUvqDA;EAqNI,uBR9LqB;EQ+LrB,gCAA0D;EAC1D,iCAA2D;AVs9C/D;;AUp9CA;EACE,mBAAmB;EACnB,aAAa;EACb,eAAe;EACf,2BAA2B;AVu9C7B;;AU39CA;EAMI,qBAAqB;AVy9CzB;;AU/9CA;ETzHI,oBSiIwC;AV29C5C;;AUn+CA;EAUI,sBAAsB;AV69C1B;;AUv+CA;EAYI,mBAAmB;AV+9CvB;;AU3+CA;EAlOE,kBR6BgB;EQ5BhB,kBRFc;AFmtDhB;;AUh/CA;EA7NE,kBRRc;AFytDhB;;AUp/CA;EA3NE,iBRXa;AF8tDf;;AUx/CA;EA0BQ,4BAA4B;EAC5B,yBAAyB;AVk+CjC;;AU7/CA;EA6BQ,6BAA6B;EAC7B,0BAA0B;ETvJ9B,kBSwJwC;AVo+C5C;;AUngDA;ETzHI,eS0JqC;AVs+CzC;;AUvgDA;EAoCQ,UAAU;AVu+ClB;;AU3gDA;EA0CQ,UAAU;AVq+ClB;;AU/gDA;EA4CU,UAAU;AVu+CpB;;AUnhDA;EA8CQ,YAAY;EACZ,cAAc;AVy+CtB;;AUxhDA;EAiDI,uBAAuB;AV2+C3B;;AU5hDA;EAoDQ,oBAAoB;EACpB,qBAAqB;AV4+C7B;;AUjiDA;EAuDI,yBAAyB;AV8+C7B;;AUriDA;EA0DQ,oBAAoB;EACpB,qBAAqB;AV++C7B;;AYhzDA;EACE,YAAY;EACZ,cAAc;EACd,kBAAkB;EAClB,WAAW;AZmzDb;;AYvzDA;EAMI,0BAA0B;EAC1B,kBV2CM;EU1CN,mBV0CM;EUzCN,WAAW;AZqzDf;;AChuDE;EW9FF;IAWI,gBAAuC;EZwzDzC;AACF;;AC5tDI;EWxGJ;IAcM,iBAAqE;EZ2zDzE;AACF;;ACntDI;EWvHJ;IAiBM,iBAAiE;EZ8zDrE;AACF;;ACnuDI;EW7GJ;IAoBM,iBAAqE;EZi0DzE;AACF;;AC1tDI;EW5HJ;IAuBM,iBAAiE;EZo0DrE;AACF;;Aa50DA;EAII,kBAAkB;Ab40DtB;;Aah1DA;;;;;;;EAcM,kBAAkB;Ab40DxB;;Aa11DA;;;;;;EAqBI,cXlC0B;EWmC1B,gBXEiB;EWDjB,kBAxC+B;Abs3DnC;;Aar2DA;EAyBI,cAAc;EACd,oBAAoB;Abg1DxB;;Aa12DA;EA4BM,eAAe;Abk1DrB;;Aa92DA;EA8BI,iBAAiB;EACjB,uBAAuB;Abo1D3B;;Aan3DA;EAiCM,oBAAoB;Abs1D1B;;Aav3DA;EAmCI,gBAAgB;EAChB,uBAAuB;Abw1D3B;;Aa53DA;EAsCM,oBAAoB;Ab01D1B;;Aah4DA;EAwCI,iBAAiB;EACjB,oBAAoB;Ab41DxB;;Aar4DA;EA2CI,kBAAkB;EAClB,uBAAuB;Ab81D3B;;Aa14DA;EA8CI,cAAc;EACd,kBAAkB;Abg2DtB;;Aa/4DA;EAiDI,4BXvD0B;EDmI1B,8BCtI0B;EW4D1B,qBAhEqC;Abk6DzC;;Aar5DA;EAqDI,4B
AA4B;EZwE5B,gBYvEmC;EACnC,eAAe;Abo2DnB;;Aa35DA;EAyDM,wBAAwB;Abs2D9B;;Aa/5DA;EA2DQ,4BAA4B;Abw2DpC;;Aan6DA;EA6DQ,4BAA4B;Ab02DpC;;Aav6DA;EA+DQ,4BAA4B;Ab42DpC;;Aa36DA;EAiEQ,4BAA4B;Ab82DpC;;Aa/6DA;EAmEI,wBAAwB;EZ0DxB,gBYzDmC;EACnC,eAAe;Abg3DnB;;Aar7DA;EAuEM,uBAAuB;EACvB,iBAAiB;Abk3DvB;;Aa17DA;EA0EQ,uBAAuB;Abo3D/B;;Aa97DA;EZ6HI,gBYjDmC;Abs3DvC;;Aal8DA;EA8EI,gBAAgB;EAChB,iBAAiB;EACjB,kBAAkB;Abw3DtB;;Aax8DA;EAkFM,eAAe;Ab03DrB;;Aa58DA;EAoFM,kBAAkB;Ab43DxB;;Aah9DA;EAsFM,qBAAqB;Ab83D3B;;Aap9DA;EAwFM,kBAAkB;Abg4DxB;;Aax9DA;EZ2CE,iCAAiC;EYgD/B,gBAAgB;EAChB,qBAvG8B;EAwG9B,gBAAgB;EAChB,iBAAiB;Abk4DrB;;Aah+DA;;EAiGI,cAAc;Abo4DlB;;Aar+DA;EAmGI,WAAW;Abs4Df;;Aaz+DA;;EAsGM,yBX/GwB;EWgHxB,qBA/GmC;EAgHnC,qBA/GmC;EAgHnC,mBAAmB;Abw4DzB;;Aaj/DA;EA2GM,cXxHwB;AFkgE9B;;Aar/DA;EA6GQ,mBAAmB;Ab44D3B;;Aaz/DA;;EAiHQ,qBAtHsC;EAuHtC,cX/HsB;AF4gE9B;;Aa//DA;;EAsHQ,qBAzHsC;EA0HtC,cXpIsB;AFkhE9B;;AargEA;;EA6HY,sBAAsB;Ab64DlC;;Aa1gEA;EAgIM,aAAa;Ab84DnB;;Aa9gEA;EAmII,kBXhHY;AF+/DhB;;AalhEA;EAqII,kBXpHY;AFqgEhB;;AathEA;EAuII,iBXvHW;AF0gEf;;AcxiEA;EACE,mBAAmB;EACnB,oBAAoB;EACpB,uBAAuB;EACvB,cATsB;EAUtB,aAVsB;AdqjExB;;AchjEA;EAQI,YAZwB;EAaxB,WAbwB;AdyjE5B;;AcrjEA;EAWI,YAdyB;EAezB,WAfyB;Ad6jE7B;;Ac1jEA;EAcI,YAhBwB;EAiBxB,WAjBwB;AdikE5B;;AelkEA;EACE,cAAc;EACd,kBAAkB;AfqkEpB;;AevkEA;EAII,cAAc;EACd,YAAY;EACZ,WAAW;AfukEf;;Ae7kEA;EAQM,uBb6DmB;AF4gEzB;;AejlEA;EAUI,WAAW;Af2kEf;;AerlEA;;;;;;;;;;;;;;;;;EA+BM,YAAY;EACZ,WAAW;Af0kEjB;;Ae1mEA;EAmCI,iBAAiB;Af2kErB;;Ae9mEA;EAqCI,gBAAgB;Af6kEpB;;AelnEA;EAuCI,gBAAgB;Af+kEpB;;AetnEA;EAyCI,qBAAqB;AfilEzB;;Ae1nEA;EA2CI,gBAAgB;AfmlEpB;;Ae9nEA;EA6CI,mBAAmB;AfqlEvB;;AeloEA;EA+CI,gBAAgB;AfulEpB;;AetoEA;EAiDI,qBAAqB;AfylEzB;;Ae1oEA;EAmDI,iBAAiB;Af2lErB;;Ae9oEA;EAqDI,sBAAsB;Af6lE1B;;AelpEA;EAuDI,iBAAiB;Af+lErB;;AetpEA;EAyDI,sBAAsB;AfimE1B;;Ae1pEA;EA2DI,sBAAsB;AfmmE1B;;Ae9pEA;EA6DI,iBAAiB;AfqmErB;;AelqEA;EA+DI,iBAAiB;AfumErB;;AetqEA;EAmEM,YAAwB;EACxB,WAAuB;AfumE7B;;Ae3qEA;EAmEM,YAAwB;EACxB,WAAuB;Af4mE7B;;AehrEA;EAmEM,YAAwB;EACxB,WAAuB;AfinE7B;;AerrEA;EAmEM,YAAwB;EACxB,WAAuB;AfsnE7B;;Ae1
rEA;EAmEM,YAAwB;EACxB,WAAuB;Af2nE7B;;Ae/rEA;EAmEM,YAAwB;EACxB,WAAuB;AfgoE7B;;AepsEA;EAmEM,aAAwB;EACxB,YAAuB;AfqoE7B;;AgBlsEA;EAEE,4BdE4B;EcD5B,kBdyDU;EcxDV,kBAAkB;EAEhB,sCAXoD;AhB8sExD;;AgBzsEA;EAUI,mBAAmB;EACnB,0BAA0B;AhBmsE9B;;AgB9sEA;EAaI,mBAAmB;AhBqsEvB;;AgBltEA;;EAgBI,iBdV2B;AFitE/B;;AgBvtEA;EAkBI,uBAAuB;AhBysE3B;;AgB3tEA;Ef+II,ae3H4B;EAC5B,kBAAkB;EAClB,WAAW;AhB2sEf;;AgBjuEA;;;EA0BI,mBAAmB;AhB6sEvB;;AgBvuEA;EAgCM,uBd1ByB;Ec2BzB,cdxCuB;AFmvE7B;;AgB5uEA;EAgCM,yBdvCuB;EcwCvB,Yd3ByB;AF2uE/B;;AgBjvEA;EAgCM,4Bd5BwB;Ec6BxB,yBLsCe;AX+qErB;;AgBtvEA;EAgCM,yBdnCwB;EcoCxB,WLwCU;AXkrEhB;;AgB3vEA;EAgCM,yBdrB4B;EcsB5B,WLwCU;AXurEhB;;AgBhwEA;EAuCU,yBLyCsC;EKxCtC,cLiD2D;AX4qErE;;AgBrwEA;EAgCM,yBdnB4B;EcoB5B,WLwCU;AXisEhB;;AgB1wEA;EAuCU,yBLyCsC;EKxCtC,cLiD2D;AXsrErE;;AgB/wEA;EAgCM,yBdpB4B;EcqB5B,WLwCU;AX2sEhB;;AgBpxEA;EAuCU,yBLyCsC;EKxCtC,cLiD2D;AXgsErE;;AgBzxEA;EAgCM,yBdtB4B;EcuB5B,WLwCU;AXqtEhB;;AgB9xEA;EAuCU,yBLyCsC;EKxCtC,cLiD2D;AX0sErE;;AgBnyEA;EAgCM,yBdvB4B;EcwB5B,yBLsCe;AXiuErB;;AgBxyEA;EAuCU,yBLyCsC;EKxCtC,cLiD2D;AXotErE;;AgB7yEA;EAgCM,yBdjB2B;EckB3B,WLwCU;AXyuEhB;;AgBlzEA;EAuCU,yBLyCsC;EKxCtC,cLiD2D;AX8tErE;;AiBxzEA;EAEE,qBAAqB;EACrB,wBAAwB;EACxB,YAAY;EACZ,uBf0DuB;EezDvB,cAAc;EACd,YfsBW;EerBX,gBAAgB;EAChB,UAAU;EACV,WAAW;AjB0zEb;;AiBp0EA;EAYI,yBfT2B;AFq0E/B;;AiBx0EA;EAcI,yBff0B;AF60E9B;;AiB50EA;EAgBI,yBfjB0B;AFi1E9B;;AiBh1EA;EAkBI,yBfnB0B;EeoB1B,YAAY;AjBk0EhB;;AiBr1EA;EAyBQ,uBflBuB;AFk1E/B;;AiBz1EA;EA2BQ,uBfpBuB;AFs1E/B;;AiB71EA;EA6BQ,uBftBuB;AF01E/B;;AiBj2EA;EA+BQ,mEAA2F;AjBs0EnG;;AiBr2EA;EAyBQ,yBf/BqB;AF+2E7B;;AiBz2EA;EA2BQ,yBfjCqB;AFm3E7B;;AiB72EA;EA6BQ,yBfnCqB;AFu3E7B;;AiBj3EA;EA+BQ,qEAA2F;AjBs1EnG;;AiBr3EA;EAyBQ,4BfpBsB;AFo3E9B;;AiBz3EA;EA2BQ,4BftBsB;AFw3E9B;;AiB73EA;EA6BQ,4BfxBsB;AF43E9B;;AiBj4EA;EA+BQ,wEAA2F;AjBs2EnG;;AiBr4EA;EAyBQ,yBf3BsB;AF24E9B;;AiBz4EA;EA2BQ,yBf7BsB;AF+4E9B;;AiB74EA;EA6BQ,yBf/BsB;AFm5E9B;;AiBj5EA;EA+BQ,qEAA2F;AjBs3EnG;;AiBr5EA;EAyBQ,yBfb0B;AF64ElC;;AiBz5EA;EA2BQ,yBff0B;AFi5ElC;;AiB75EA;EA6BQ,yBfjB0B;AFq5ElC;;AiBj6EA;EA+BQ,qEAA2F;AjBs4EnG;;AiBr
6EA;EAyBQ,yBfX0B;AF25ElC;;AiBz6EA;EA2BQ,yBfb0B;AF+5ElC;;AiB76EA;EA6BQ,yBff0B;AFm6ElC;;AiBj7EA;EA+BQ,qEAA2F;AjBs5EnG;;AiBr7EA;EAyBQ,yBfZ0B;AF46ElC;;AiBz7EA;EA2BQ,yBfd0B;AFg7ElC;;AiB77EA;EA6BQ,yBfhB0B;AFo7ElC;;AiBj8EA;EA+BQ,qEAA2F;AjBs6EnG;;AiBr8EA;EAyBQ,yBfd0B;AF87ElC;;AiBz8EA;EA2BQ,yBfhB0B;AFk8ElC;;AiB78EA;EA6BQ,yBflB0B;AFs8ElC;;AiBj9EA;EA+BQ,qEAA2F;AjBs7EnG;;AiBr9EA;EAyBQ,yBff0B;AF+8ElC;;AiBz9EA;EA2BQ,yBfjB0B;AFm9ElC;;AiB79EA;EA6BQ,yBfnB0B;AFu9ElC;;AiBj+EA;EA+BQ,qEAA2F;AjBs8EnG;;AiBr+EA;EAyBQ,yBfTyB;AFy9EjC;;AiBz+EA;EA2BQ,yBfXyB;AF69EjC;;AiB7+EA;EA6BQ,yBfbyB;AFi+EjC;;AiBj/EA;EA+BQ,qEAA2F;AjBs9EnG;;AiBr/EA;EAkCI,gCAtCkC;UAsClC,wBAtCkC;EAuClC,2CAAmC;UAAnC,mCAAmC;EACnC,yCAAiC;UAAjC,iCAAiC;EACjC,yCAAiC;UAAjC,iCAAiC;EACjC,yBfnC2B;EeoC3B,qEAA0F;EAC1F,6BAA6B;EAC7B,4BAA4B;EAC5B,0BAA0B;AjBu9E9B;;AiBjgFA;EA4CM,6BAA6B;AjBy9EnC;;AiBrgFA;EA8CM,6BAA6B;AjB29EnC;;AiBzgFA;EAgDM,oBAAoB;AjB69E1B;;AiB7gFA;EAoDI,eftBY;AFm/EhB;;AiBjhFA;EAsDI,ef1BY;AFy/EhB;;AiBrhFA;EAwDI,cf7BW;AF8/Ef;;AiB/9EA;EACE;IACE,2BAA2B;EjBk+E7B;EiBj+EA;IACE,4BAA4B;EjBm+E9B;AACF;;AiBx+EA;EACE;IACE,2BAA2B;EjBk+E7B;EiBj+EA;IACE,4BAA4B;EjBm+E9B;AACF;;AkB/gFA;EAEE,uBhBd6B;EgBe7B,chBxB4B;AFyiF9B;;AkBphFA;;EAMI,yBhBvB0B;EgBwB1B,qBA9B6B;EA+B7B,qBA9B6B;EA+B7B,mBAAmB;AlBmhFvB;;AkB5hFA;;EAeQ,uBhB3BuB;EgB4BvB,mBhB5BuB;EgB6BvB,chB1CqB;AF4jF7B;;AkBniFA;;EAeQ,yBhBxCqB;EgByCrB,qBhBzCqB;EgB0CrB,YhB7BuB;AFsjF/B;;AkB1iFA;;EAeQ,4BhB7BsB;EgB8BtB,wBhB9BsB;EgB+BtB,yBPoCa;AX4/ErB;;AkBjjFA;;EAeQ,yBhBpCsB;EgBqCtB,qBhBrCsB;EgBsCtB,WPsCQ;AXigFhB;;AkBxjFA;;EAeQ,yBhBtB0B;EgBuB1B,qBhBvB0B;EgBwB1B,WPsCQ;AXwgFhB;;AkB/jFA;;EAeQ,yBhBpB0B;EgBqB1B,qBhBrB0B;EgBsB1B,WPsCQ;AX+gFhB;;AkBtkFA;;EAeQ,yBhBrB0B;EgBsB1B,qBhBtB0B;EgBuB1B,WPsCQ;AXshFhB;;AkB7kFA;;EAeQ,yBhBvB0B;EgBwB1B,qBhBxB0B;EgByB1B,WPsCQ;AX6hFhB;;AkBplFA;;EAeQ,yBhBxB0B;EgByB1B,qBhBzB0B;EgB0B1B,yBPoCa;AXsiFrB;;AkB3lFA;;EAeQ,yBhBlByB;EgBmBzB,qBhBnByB;EgBoBzB,WPsCQ;AX2iFhB;;AkBlmFA;;EAoBM,mBAAmB;EACnB,SAAS;AlBmlFf;;AkBxmFA;;EAuBM,yBhB9B4B;EgB+B5B,WP+BU;AXujFhB;;AkB9mFA;;;;EA2BQ,mBAAmB;AlB0lF3B;;Ak
BrnFA;;EA6BM,sBAAsB;AlB6lF5B;;AkB1nFA;EA+BI,chBpD0B;AFmpF9B;;AkB9nFA;EAiCM,mBAAmB;AlBimFzB;;AkBloFA;EAoCM,yBhB3C4B;EgB4C5B,WPkBU;AXglFhB;;AkBvoFA;;EAwCQ,mBAAmB;AlBomF3B;;AkB5oFA;;EA2CQ,kBPYQ;EOXR,mBAAmB;AlBsmF3B;;AkBlpFA;EA8CI,6BA5DqC;AlBoqFzC;;AkBtpFA;;EAiDM,qBApEgC;EAqEhC,chBvEwB;AFirF9B;;AkB5pFA;EAoDI,6BAhEqC;AlB4qFzC;;AkBhqFA;;EAuDM,qBAxEgC;EAyEhC,chB7EwB;AF2rF9B;;AkBtqFA;EA0DI,6BAvEqC;AlBurFzC;;AkB1qFA;;EA+DU,sBAAsB;AlBgnFhC;;AkB/qFA;;EAoEM,iBAAiB;AlBgnFvB;;AkBprFA;;EAyEU,wBAAwB;AlBgnFlC;;AkBzrFA;EA2EI,WAAW;AlBknFf;;AkB7rFA;EAgFU,yBhB7FoB;AF8sF9B;;AkBjsFA;EAqFY,yBhBlGkB;AFktF9B;;AkBrsFA;EAuFc,4BhBrGgB;AFutF9B;;AkBzsFA;;EA2FM,qBAAqB;AlBmnF3B;;AkB9sFA;EAgGU,yBhB7GoB;AF+tF9B;;AkBhnFA;EjB/DE,iCAAiC;EiBkEjC,cAAc;EACd,kBAAkB;EAClB,eAAe;AlBknFjB;;AmB7uFA;EACE,mBAAmB;EACnB,aAAa;EACb,eAAe;EACf,2BAA2B;AnBgvF7B;;AmBpvFA;EAMI,qBAAqB;AnBkvFzB;;AmBxvFA;ElByII,oBkBjIwC;AnBovF5C;;AmB5vFA;EAUI,sBAAsB;AnBsvF1B;;AmBhwFA;EAYI,mBAAmB;AnBwvFvB;;AmBpwFA;EAgBM,ejBcO;AF0uFb;;AmBxwFA;EAmBM,kBjBUU;AF+uFhB;;AmB5wFA;EAqBI,uBAAuB;AnB2vF3B;;AmBhxFA;EAuBM,qBAAqB;EACrB,oBAAoB;AnB6vF1B;;AmBrxFA;EA0BI,yBAAyB;AnB+vF7B;;AmBzxFA;EA6BQ,mBAAmB;AnBgwF3B;;AmB7xFA;EA+BQ,eAAe;AnBkwFvB;;AmBjyFA;ElByII,ekBvGmC;AnBmwFvC;;AmBryFA;ElByII,ckBrGqC;EAE/B,yBAAyB;EACzB,4BAA4B;AnBowFtC;;AmB3yFA;EA6CU,0BAA0B;EAC1B,6BAA6B;AnBkwFvC;;AmB7vFA;EACE,mBAAmB;EACnB,4BjB/C4B;EiBgD5B,kBjBQU;EiBPV,cjBvD4B;EiBwD5B,oBAAoB;EACpB,kBjB1Bc;EiB2Bd,WAAW;EACX,uBAAuB;EACvB,gBAAgB;EAChB,oBAAoB;EACpB,qBAAqB;EACrB,mBAAmB;AnBgwFrB;;AmB5wFA;ElBsFI,oBkBxEuC;ElBwEvC,uBkBvEyC;AnBkwF7C;;AmBjxFA;EAqBM,uBjBhEyB;EiBiEzB,cjB9EuB;AF80F7B;;AmBtxFA;EAqBM,yBjB7EuB;EiB8EvB,YjBjEyB;AFs0F/B;;AmB3xFA;EAqBM,4BjBlEwB;EiBmExB,yBRAe;AX0wFrB;;AmBhyFA;EAqBM,yBjBzEwB;EiB0ExB,WREU;AX6wFhB;;AmBryFA;EAqBM,yBjB3D4B;EiB4D5B,WREU;AXkxFhB;;AmB1yFA;EA4BU,yBRGsC;EQFtC,cRW2D;AXuwFrE;;AmB/yFA;EAqBM,yBjBzD4B;EiB0D5B,WREU;AX4xFhB;;AmBpzFA;EA4BU,yBRGsC;EQFtC,cRW2D;AXixFrE;;AmBzzFA;EAqBM,yBjB1D4B;EiB2D5B,WREU;AXsyFhB;;AmB9zFA;EA4BU,yBRGsC;EQFtC,cRW2D;AX2xFrE;;AmBn0FA;EAqBM,yBjB5D
4B;EiB6D5B,WREU;AXgzFhB;;AmBx0FA;EA4BU,yBRGsC;EQFtC,cRW2D;AXqyFrE;;AmB70FA;EAqBM,yBjB7D4B;EiB8D5B,yBRAe;AX4zFrB;;AmBl1FA;EA4BU,yBRGsC;EQFtC,cRW2D;AX+yFrE;;AmBv1FA;EAqBM,yBjBvD2B;EiBwD3B,WREU;AXo0FhB;;AmB51FA;EA4BU,yBRGsC;EQFtC,cRW2D;AXyzFrE;;AmBj2FA;EAgCI,kBjBpDY;AFy3FhB;;AmBr2FA;EAkCI,ejBvDS;AF83Fb;;AmBz2FA;EAoCI,kBjB1DY;AFm4FhB;;AmB72FA;ElBsFI,qBkB/C0C;ElB+C1C,sBkB9C0C;AnB00F9C;;AmBl3FA;ElBsFI,qBkB5C0C;ElB4C1C,sBkB3C0C;AnB40F9C;;AmBv3FA;ElBsFI,qBkBzC0C;ElByC1C,sBkBxC0C;AnB80F9C;;AmB53FA;ElBsFI,gBkB7ImB;EAyGnB,UAAU;EACV,kBAAkB;EAClB,UAAU;AnB+0Fd;;AmBn4FA;EAuDM,8BAA8B;EAC9B,WAAW;EACX,cAAc;EACd,SAAS;EACT,kBAAkB;EAClB,QAAQ;EACR,0DAA0D;EAC1D,+BAA+B;AnBg1FrC;;AmB94FA;EAgEM,WAAW;EACX,UAAU;AnBk1FhB;;AmBn5FA;EAmEM,WAAW;EACX,UAAU;AnBo1FhB;;AmBx5FA;EAuEM,yBAAmD;AnBq1FzD;;AmB55FA;EAyEM,yBAAoD;AnBu1F1D;;AmBh6FA;EA2EI,uBjB9DqB;AFu5FzB;;AmBv1FA;EAEI,0BAA0B;AnBy1F9B;;AoB/8FA;;EAGE,sBAAsB;ApBi9FxB;;AoBp9FA;;;;EAMI,oBAAoB;ApBq9FxB;;AoB39FA;;EAQI,iBApBmB;ApB4+FvB;;AoBh+FA;;EAUI,iBArBmB;ApBg/FvB;;AoBr+FA;;EAYI,sBAAsB;ApB89F1B;;AoB59FA;EACE,clB5B4B;EkB+B5B,elBHW;EkBIX,gBlBKmB;EkBJnB,kBAnCuB;ApBggGzB;;AoBn+FA;EAQI,cApCwB;EAqCxB,oBApCyB;ApBmgG7B;;AoBx+FA;EAWI,oBAAoB;ApBi+FxB;;AoB5+FA;EAaI,oBA7B+B;ApBggGnC;;AoBh/FA;EAkBM,elBnBO;AFq/Fb;;AoBp/FA;EAkBM,iBlBlBS;AFw/Ff;;AoBx/FA;EAkBM,elBjBO;AF2/Fb;;AoB5/FA;EAkBM,iBlBhBS;AF8/Ff;;AoBhgGA;EAkBM,kBlBfU;AFigGhB;;AoBpgGA;EAkBM,elBdO;AFogGb;;AoBxgGA;EAkBM,kBlBbU;AFugGhB;;AoBx/FA;EACE,clB/C4B;EkBkD5B,kBlBrBc;EkBsBd,gBlBjBiB;EkBkBjB,iBA7CyB;ApBsiG3B;;AoB//FA;EAQI,clBvD0B;EkBwD1B,gBlBnBiB;AF8gGrB;;AoBpgGA;EAWI,oBA/C+B;ApB4iGnC;;AoBxgGA;EAgBM,elBrCO;AFiiGb;;AoB5gGA;EAgBM,iBlBpCS;AFoiGf;;AoBhhGA;EAgBM,elBnCO;AFuiGb;;AoBphGA;EAgBM,iBlBlCS;AF0iGf;;AoBxhGA;EAgBM,kBlBjCU;AF6iGhB;;AoB5hGA;EAgBM,elBhCO;AFgjGb;;AoBhiGA;EAgBM,kBlB/BU;AFmjGhB;;AqBnlGA;EACE,cAAc;EACd,eAAe;EACf,mBAAmB;EACnB,kBAAkB;EAClB,yBAAyB;ArBslG3B;;AqBplGA;EAEE,gBnB0BiB;EmBzBjB,eAAe;EACf,gBAAgB;EAChB,UAAU;ArBslGZ;;AqB3lGA;EAOI,cAAc;EACd,eAAe;ArBwlGnB;;AqBnlGA;EACE,mBAAmB;EACnB,4BnBf4B;EmBgB5B,
uBnB0CuB;EmBzCvB,oBAAoB;EACpB,kBnBKc;EmBJd,WAAW;EACX,uBAAuB;EACvB,oBAAoB;EACpB,gBAAgB;EAChB,uBAAuB;EACvB,kBAAkB;EAClB,mBAAmB;ArBslGrB;;AsB5nGA,eAAA;ACuDA;EAxBE,uBrBhB6B;EqBiB7B,qBrBtB4B;EqBuB5B,kBrBoCU;EqBnCV,crB5B4B;AF8nG9B;;ACjkGI;EsB/BA,4BrB9B0B;AFkoG9B;;ACrkGI;EsB/BA,4BrB9B0B;AFsoG9B;;ACzkGI;EsB/BA,4BrB9B0B;AF0oG9B;;AC7kGI;EsB/BA,4BrB9B0B;AF8oG9B;;AuB/mGE;EAEE,qBrB9B0B;AF+oG9B;;AuBhnGE;EAIE,qBrBtB8B;EqBuB9B,kDrBvB8B;AFuoGlC;;AuB/mGE;;;;;EAEE,4BrBnC0B;EqBoC1B,wBrBpC0B;EqBqC1B,gBAAgB;EAChB,crB3C0B;AFgqG9B;;ACrmGI;;;;;EsBdE,+BrB7CwB;AFwqG9B;;AC7mGI;;;;;EsBdE,+BrB7CwB;AFgrG9B;;ACrnGI;;;;;EsBdE,+BrB7CwB;AFwrG9B;;AC7nGI;;;;;EsBdE,+BrB7CwB;AFgsG9B;;AwBlsGA;EAEE,2DtBN2B;EsBO3B,eAAe;EACf,WAAW;AxBosGb;;AwBnsGE;EACE,gBAAgB;AxBssGpB;;AwBlsGI;EACE,mBtBFyB;AFusG/B;;AwBtsGK;EAMG,mDtBPuB;AF2sG/B;;AwB1sGI;EACE,qBtBfuB;AF4tG7B;;AwB9sGK;EAMG,gDtBpBqB;AFguG7B;;AwBltGI;EACE,wBtBJwB;AFytG9B;;AwBttGK;EAMG,mDtBTsB;AF6tG9B;;AwB1tGI;EACE,qBtBXwB;AFwuG9B;;AwB9tGK;EAMG,gDtBhBsB;AF4uG9B;;AwBluGI;EACE,qBtBG4B;AFkuGlC;;AwBtuGK;EAMG,iDtBF0B;AFsuGlC;;AwB1uGI;EACE,qBtBK4B;AFwuGlC;;AwB9uGK;EAMG,kDtBA0B;AF4uGlC;;AwBlvGI;EACE,qBtBI4B;AFivGlC;;AwBtvGK;EAMG,kDtBD0B;AFqvGlC;;AwB1vGI;EACE,qBtBE4B;AF2vGlC;;AwB9vGK;EAMG,kDtBH0B;AF+vGlC;;AwBlwGI;EACE,qBtBC4B;AFowGlC;;AwBtwGK;EAMG,kDtBJ0B;AFwwGlC;;AwB1wGI;EACE,qBtBO2B;AFswGjC;;AwB9wGK;EAMG,kDtBEyB;AF0wGjC;;AwB1wGE;ErBoBA,kBDwBgB;ECvBhB,kBDPc;AFiwGhB;;AwB7wGE;ErBqBA,kBDXc;AFuwGhB;;AwB/wGE;ErBqBA,iBDda;AF4wGf;;AwBhxGE;EACE,cAAc;EACd,WAAW;AxBmxGf;;AwBlxGE;EACE,eAAe;EACf,WAAW;AxBqxGf;;AwBnxGA;EAGI,uBtB8BqB;EsB7BrB,gDAA4D;EAC5D,iDAA6D;AxBoxGjE;;AwBzxGA;EAOI,6BAA6B;EAC7B,yBAAyB;EACzB,gBAAgB;EAChB,eAAe;EACf,gBAAgB;AxBsxGpB;;AwBpxGA;EAEE,cAAc;EACd,eAAe;EACf,eAAe;EACf,2BrB/CkE;EqBgDlE,gBAAgB;AxBsxGlB;;AwB5xGA;EAQI,gBA1DsB;EA2DtB,eA1DqB;AxBk1GzB;;AwBjyGA;EAWI,eAAe;AxB0xGnB;;AwBryGA;EAcI,YAAY;AxB2xGhB;;AyB51GA;EACE,eAAe;EACf,qBAAqB;EACrB,iBAAiB;EACjB,kBAAkB;AzB+1GpB;;AyB91GE;EACE,eAAe;AzBi2GnB;;AyBh2GE;EACE,cvBF0B;AFq2G9B;;AyBl2GE;;;;;EAGE,cvBJ0B;EuBK1B,mBAAmB;AzBu2G
vB;;AyBl2GA;ExB8HI,kBwB3HqC;AzBm2GzC;;A0Bt3GA;EACE,qBAAqB;EACrB,eAAe;EACf,kBAAkB;EAClB,mBAAmB;A1By3GrB;;A0B73GA;EAMI,avBHkB;AH83GtB;;A0Bj4GA;EAUM,qBxBU4B;EDkI9B,cyB3I+B;EAC7B,UAAU;A1B23GhB;;A0Bv4GA;EAeM,uBxBsDmB;EDyErB,iByB9HsC;A1B43G1C;;A0B54GA;EAmBI,eAAe;EACf,cAAc;EACd,cAAc;EACd,eAAe;EACf,aAAa;A1B63GjB;;A0Bp5GA;EAyBM,aAAa;A1B+3GnB;;A0Bx5GA;;EA4BM,wBxBjBwB;AFk5G9B;;A0B75GA;EzB8II,oByBhHwC;A1Bm4G5C;;A0Bj6GA;EAgCM,YAAY;EACZ,UAAU;A1Bq4GhB;;A0Bt6GA;EAmCQ,kBAAkB;A1Bu4G1B;;A0B16GA;EAuCM,qBxBnCwB;AF06G9B;;A0B96GA;EA6CQ,mBxBhCuB;AFq6G/B;;A0Bl7GA;EA+CQ,mBxBlCuB;AFy6G/B;;A0Bt7GA;EAkDU,qBfyDuB;AX+0GjC;;A0B17GA;EAuDU,mDxB1CqB;AFi7G/B;;A0B97GA;EA6CQ,qBxB7CqB;AFk8G7B;;A0Bl8GA;EA+CQ,qBxB/CqB;AFs8G7B;;A0Bt8GA;EAkDU,mBfyDuB;AX+1GjC;;A0B18GA;EAuDU,gDxBvDmB;AF88G7B;;A0B98GA;EA6CQ,wBxBlCsB;AFu8G9B;;A0Bl9GA;EA+CQ,wBxBpCsB;AF28G9B;;A0Bt9GA;EAkDU,qBfyDuB;AX+2GjC;;A0B19GA;EAuDU,mDxB5CoB;AFm9G9B;;A0B99GA;EA6CQ,qBxBzCsB;AF89G9B;;A0Bl+GA;EA+CQ,qBxB3CsB;AFk+G9B;;A0Bt+GA;EAkDU,qBfyDuB;AX+3GjC;;A0B1+GA;EAuDU,gDxBnDoB;AF0+G9B;;A0B9+GA;EA6CQ,qBxB3B0B;AFg+GlC;;A0Bl/GA;EA+CQ,qBxB7B0B;AFo+GlC;;A0Bt/GA;EAkDU,qBfyDuB;AX+4GjC;;A0B1/GA;EAuDU,iDxBrCwB;AF4+GlC;;A0B9/GA;EA6CQ,qBxBzB0B;AF8+GlC;;A0BlgHA;EA+CQ,qBxB3B0B;AFk/GlC;;A0BtgHA;EAkDU,qBfyDuB;AX+5GjC;;A0B1gHA;EAuDU,kDxBnCwB;AF0/GlC;;A0B9gHA;EA6CQ,qBxB1B0B;AF+/GlC;;A0BlhHA;EA+CQ,qBxB5B0B;AFmgHlC;;A0BthHA;EAkDU,qBfyDuB;AX+6GjC;;A0B1hHA;EAuDU,kDxBpCwB;AF2gHlC;;A0B9hHA;EA6CQ,qBxB5B0B;AFihHlC;;A0BliHA;EA+CQ,qBxB9B0B;AFqhHlC;;A0BtiHA;EAkDU,qBfyDuB;AX+7GjC;;A0B1iHA;EAuDU,kDxBtCwB;AF6hHlC;;A0B9iHA;EA6CQ,qBxB7B0B;AFkiHlC;;A0BljHA;EA+CQ,qBxB/B0B;AFsiHlC;;A0BtjHA;EAkDU,qBfyDuB;AX+8GjC;;A0B1jHA;EAuDU,kDxBvCwB;AF8iHlC;;A0B9jHA;EA6CQ,qBxBvByB;AF4iHjC;;A0BlkHA;EA+CQ,qBxBzByB;AFgjHjC;;A0BtkHA;EAkDU,qBfyDuB;AX+9GjC;;A0B1kHA;EAuDU,kDxBjCuB;AFwjHjC;;A0B9kHA;EvB0CE,kBDwBgB;ECvBhB,kBDPc;AF+iHhB;;A0BnlHA;EvB6CE,kBDXc;AFqjHhB;;A0BvlHA;EvB+CE,iBDda;AF0jHf;;A0B3lHA;EAkEM,qBxB5DwB;AFylH9B;;A0B/lHA;EAoEI,WAAW;A1B+hHf;;A0BnmHA;EAsEM,WAAW;A1BiiHjB;;A0BvmHA;EA0EM,aAAa;EACb,kBAAkB;E
zB2EpB,cyB1E+B;EAC7B,YAAY;EACZ,eAAe;A1BiiHrB;;A0B/mHA;EAgFM,kBxB5CU;AF+kHhB;;A0BnnHA;EAkFM,kBxBhDU;AFqlHhB;;A0BvnHA;EAoFM,iBxBnDS;AF0lHf;;A2B9mHA;EAEE,oBAAoB;EACpB,aAAa;EACb,2BAA2B;EAC3B,kBAAkB;A3BgnHpB;;A2BrnHA;EAYQ,uBzBZuB;EyBavB,yBAAyB;EACzB,czB3BqB;AFwoH7B;;A2B3nHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,czBjCmB;AF8oH7B;;A2BjoHA;EAwBU,yBAAyB;EACzB,+CzBzBqB;EyB0BrB,czBvCmB;AFopH7B;;A2BvoHA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,czB7CmB;AF0pH7B;;A2B7oHA;EAYQ,yBzBzBqB;EyB0BrB,yBAAyB;EACzB,YzBduB;AFmpH/B;;A2BnpHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,YzBpBqB;AFypH/B;;A2BzpHA;EAwBU,yBAAyB;EACzB,4CzBtCmB;EyBuCnB,YzB1BqB;AF+pH/B;;A2B/pHA;EA8BU,uBhBgEuB;EgB/DvB,yBAAyB;EACzB,YzBhCqB;AFqqH/B;;A2BrqHA;EAYQ,4BzBdsB;EyBetB,yBAAyB;EACzB,yBhBmDa;AX0mHrB;;A2B3qHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,yBhB6CW;AXgnHrB;;A2BjrHA;EAwBU,yBAAyB;EACzB,+CzB3BoB;EyB4BpB,yBhBuCW;AXsnHrB;;A2BvrHA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,yBhBiCW;AX4nHrB;;A2B7rHA;EAYQ,yBzBrBsB;EyBsBtB,yBAAyB;EACzB,WhBqDQ;AXgoHhB;;A2BnsHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,WhB+CM;AXsoHhB;;A2BzsHA;EAwBU,yBAAyB;EACzB,4CzBlCoB;EyBmCpB,WhByCM;AX4oHhB;;A2B/sHA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,WhBmCM;AXkpHhB;;A2BrtHA;EAYQ,yBzBP0B;EyBQ1B,yBAAyB;EACzB,WhBqDQ;AXwpHhB;;A2B3tHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,WhB+CM;AX8pHhB;;A2BjuHA;EAwBU,yBAAyB;EACzB,6CzBpBwB;EyBqBxB,WhByCM;AXoqHhB;;A2BvuHA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,WhBmCM;AX0qHhB;;A2B7uHA;EAYQ,yBzBL0B;EyBM1B,yBAAyB;EACzB,WhBqDQ;AXgrHhB;;A2BnvHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,WhB+CM;AXsrHhB;;A2BzvHA;EAwBU,yBAAyB;EACzB,8CzBlBwB;EyBmBxB,WhByCM;AX4rHhB;;A2B/vHA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,WhBmCM;AXksHhB;;A2BrwHA;EAYQ,yBzBN0B;EyBO1B,yBAAyB;EACzB,WhBqDQ;AXwsHhB;;A2B3wHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,WhB+CM;AX8sHhB;;A2BjxHA;EAwBU,yBAAyB;EACzB,8CzBnBwB;EyBoBxB,WhByCM;AXotHhB;;A2BvxHA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,WhBmCM;AX0tHhB;;A2B7xHA;EAYQ,yBzBR0B;EyBS1B,yBAAyB;EACzB,WhBqDQ;AXguHhB;;A2BnyHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,WhB+CM;AXsuHhB;;A2Bzy
HA;EAwBU,yBAAyB;EACzB,8CzBrBwB;EyBsBxB,WhByCM;AX4uHhB;;A2B/yHA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,WhBmCM;AXkvHhB;;A2BrzHA;EAYQ,yBzBT0B;EyBU1B,yBAAyB;EACzB,yBhBmDa;AX0vHrB;;A2B3zHA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,yBhB6CW;AXgwHrB;;A2Bj0HA;EAwBU,yBAAyB;EACzB,8CzBtBwB;EyBuBxB,yBhBuCW;AXswHrB;;A2Bv0HA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,yBhBiCW;AX4wHrB;;A2B70HA;EAYQ,yBzBHyB;EyBIzB,yBAAyB;EACzB,WhBqDQ;AXgxHhB;;A2Bn1HA;EAkBU,yBhB4EuB;EgB3EvB,yBAAyB;EACzB,WhB+CM;AXsxHhB;;A2Bz1HA;EAwBU,yBAAyB;EACzB,8CzBhBuB;EyBiBvB,WhByCM;AX4xHhB;;A2B/1HA;EA8BU,yBhBgEuB;EgB/DvB,yBAAyB;EACzB,WhBmCM;AXkyHhB;;A2Br2HA;EAmCI,kBzBZY;AFk1HhB;;A2Bz2HA;EAqCI,kBzBhBY;AFw1HhB;;A2B72HA;EAwCQ,eAAe;A3By0HvB;;A2Bj3HA;EA0CI,iBzBtBW;AFi2Hf;;A2Br3HA;EA6CQ,eAAe;A3B40HvB;;A2Bz3HA;EAiDM,6BAA6B;EAC7B,0BAA0B;A3B40HhC;;A2B93HA;EAoDM,4BAA4B;EAC5B,yBAAyB;A3B80H/B;;A2Bn4HA;EAwDQ,kBzBFI;AFi1HZ;;A2Bv4HA;EA0DQ,aAAa;A3Bi1HrB;;A2B34HA;EA6DM,sBAAsB;A3Bk1H5B;;A2B/4HA;EA+DM,sBAAsB;EACtB,YAAY;EACZ,gBAAgB;A3Bo1HtB;;A2Br5HA;EAmEM,uBAAuB;A3Bs1H7B;;A2Bz5HA;EAqEM,aAAa;EACb,YAAY;A3Bw1HlB;;A2B95HA;EAwEQ,eAAe;A3B01HvB;;A2Bl6HA;EA2EQ,eAAe;A3B21HvB;;A2Bt6HA;EA8EQ,eAAe;A3B41HvB;;A2B16HA;EAiFQ,eAAe;A3B61HvB;;A2B96HA;EAoFQ,0BAA4C;A3B81HpD;;A2Bl7HA;EAsFQ,0BzBhCI;EyBiCJ,uBAAuB;A3Bg2H/B;;A2Bv7HA;EAyFI,uBAAuB;A3Bk2H3B;;A2B37HA;EA4FM,WAAW;A3Bm2HjB;;A2B/7HA;EA8FM,YAAY;EACZ,eAAe;A3Bq2HrB;;A2Bp8HA;EAiGI,yBAAyB;A3Bu2H7B;;A2Bx8HA;EAmGM,0BAA4C;A3By2HlD;;A2B58HA;EAqGM,0BzB/CM;EyBgDN,2BAA2B;EAC3B,SAAS;A3B22Hf;;A2Bz2HA;EACE,oBAAoB;EACpB,aAAa;EACb,eAAe;EACf,2BAA2B;EAC3B,gBAAgB;EAChB,kBAAkB;A3B42HpB;;A2Bl3HA;EASM,yBhBpB2B;EgBqB3B,czB5HwB;AFy+H9B;;A2Bv3HA;EAYM,qBhBvB2B;AXs4HjC;;A2B33HA;EAeM,yBhB1B2B;EgB2B3B,czBlIwB;AFk/H9B;;A2Bh4HA;EAkBM,qBhB7B2B;AX+4HjC;;A2Bh3HA;EACE,YAAY;EACZ,OAAO;EACP,UAAU;EACV,aAAa;EACb,kBAAkB;EAClB,MAAM;EACN,WAAW;A3Bm3Hb;;A2Bj3HA;;EAGE,qBzB9I4B;EyB+I5B,kBzBpFU;EyBqFV,cAAc;EACd,iBAAiB;EACjB,kBAAkB;EAClB,mBAAmB;A3Bm3HrB;;A2Bj3HA;EACE,4BzBnJ4B;EyBoJ5B,czB1J4B;AF8gI9B;;A2Bl3HA;EACE,qBzB1J4B;EyB2J5B,mBA5J4B;EA6J5B,2BA5JoC;EA6JpC,cAAc;
EACd,eA7JwB;EA8JxB,gBAAgB;EAChB,mBAAmB;EACnB,uBAAuB;A3Bq3HzB;;A2Bn3HA;EACE,mBAAmB;EACnB,aAAa;EACb,WAAW;EACX,uBAAuB;E1BjCrB,mB0BkCmC;EACrC,UAAU;A3Bs3HZ;;A2B53HA;EAQI,eAAe;A3Bw3HnB;;A4BtiIA;EACE,c1BF4B;E0BG5B,cAAc;EACd,e1B2BW;E0B1BX,gB1BiCe;AFwgIjB;;A4B7iIA;EAMI,oBAAoB;A5B2iIxB;;A4BjjIA;EASI,kB1BsBY;AFshIhB;;A4BrjIA;EAWI,kB1BkBY;AF4hIhB;;A4BzjIA;EAaI,iB1BeW;AFiiIf;;A4B9iIA;EACE,cAAc;EACd,kB1Bcc;E0Bbd,mBAAmB;A5BijIrB;;A4BpjIA;EAOM,Y1BdyB;AF+jI/B;;A4BxjIA;EAOM,c1B3BuB;AFglI7B;;A4B5jIA;EAOM,iB1BhBwB;AFykI9B;;A4BhkIA;EAOM,c1BvBwB;AFolI9B;;A4BpkIA;EAOM,c1BT4B;AF0kIlC;;A4BxkIA;EAOM,c1BP4B;AF4kIlC;;A4B5kIA;EAOM,c1BR4B;AFilIlC;;A4BhlIA;EAOM,c1BV4B;AFulIlC;;A4BplIA;EAOM,c1BX4B;AF4lIlC;;A4BxlIA;EAOM,c1BL2B;AF0lIjC;;A4BjlIA;EAEI,sBAAsB;A5BmlI1B;;A4BrlIA;EAKI,aAAa;EACb,2BAA2B;A5BolI/B;;A4B1lIA;E3B+GI,kB2BtGwC;A5BqlI5C;;A4B9lIA;;;EAcU,gBAAgB;A5BslI1B;;A4BpmIA;;;EAoBY,6BAA6B;EAC7B,0BAA0B;A5BslItC;;A4B3mIA;;;EA8BY,4BAA4B;EAC5B,yBAAyB;A5BmlIrC;;A4BlnIA;;;;;EAyCY,UAAU;A5BilItB;;A4B1nIA;;;;;;;;;EA8CY,UAAU;A5BwlItB;;A4BtoIA;;;;;;;;;EAgDc,UAAU;A5BkmIxB;;A4BlpIA;EAkDQ,YAAY;EACZ,cAAc;A5BomItB;;A4BvpIA;EAqDM,uBAAuB;A5BsmI7B;;A4B3pIA;EAuDM,yBAAyB;A5BwmI/B;;A4B/pIA;EA0DQ,YAAY;EACZ,cAAc;A5BymItB;;A4BpqIA;EA6DI,aAAa;EACb,2BAA2B;A5B2mI/B;;A4BzqIA;EAgEM,cAAc;A5B6mIpB;;A4B7qIA;EAkEQ,gBAAgB;E3B6CpB,qB2B5C2C;A5B+mI/C;;A4BlrIA;EAqEQ,YAAY;EACZ,cAAc;A5BinItB;;A4BvrIA;EAwEM,uBAAuB;A5BmnI7B;;A4B3rIA;EA0EM,yBAAyB;A5BqnI/B;;A4B/rIA;EA4EM,eAAe;A5BunIrB;;A4BnsIA;EAgFU,sBAAsB;A5BunIhC;;A4BvsIA;EAkFQ,uBAAuB;A5BynI/B;;A4B3sIA;EAoFQ,gBAAgB;A5B2nIxB;;AC3pIE;E2BpDF;IAuFM,aAAa;E5B6nIjB;AACF;;A4B5nIA;EAEI,kBAAkB;A5B8nItB;;ACzqIE;E2ByCF;IAII,qBAAqB;E5BioIvB;AACF;;AC3qIE;E2BqCF;IAMI,aAAa;IACb,YAAY;IACZ,cAAc;I3Bcd,oB2BbsC;IACtC,iBAAiB;E5BqoInB;E4B/oIF;IAYM,kB1BhGU;I0BiGV,oBAAoB;E5BsoIxB;E4BnpIF;IAeM,oBAAoB;E5BuoIxB;E4BtpIF;IAiBM,kB1BvGU;I0BwGV,oBAAoB;E5BwoIxB;E4B1pIF;IAoBM,iB1B3GS;I0B4GT,oBAAoB;E5ByoIxB;AACF;;A4BxoIA;EAEI,gBAAgB;A5B0oIpB;;ACxsIE;E2B4DF;IAII,aAAa;IACb,aAAa;IACb,YAAY;IACZ,cAAc;E5B6oIhB;E4BppIF;IASM,gBAAg
B;E5B8oIpB;E4BvpIF;IAWM,cAAc;E5B+oIlB;E4B1pIF;IAaQ,YAAY;E5BgpIlB;E4B7pIF;I3BDI,qB2BgB2C;E5BipI7C;AACF;;A4BhpIA;EACE,sBAAsB;EACtB,WAAW;EACX,e1BhIW;E0BiIX,kBAAkB;EAClB,mBAAmB;A5BmpIrB;;A4BxpIA;;;EAaU,c1BxKoB;AFyzI9B;;A4B9pIA;;;EAeQ,kB1B3IQ;AFgyIhB;;A4BpqIA;;;EAiBQ,kB1B/IQ;AFwyIhB;;A4B1qIA;;;EAmBQ,iB1BlJO;AF+yIf;;A4BhrIA;EAqBM,c1B7KwB;E0B8KxB,azBnLgB;EyBoLhB,oBAAoB;EACpB,kBAAkB;EAClB,MAAM;EACN,YzBvLgB;EyBwLhB,UAAU;A5B+pIhB;;A4B1rIA;;EA+BM,mBzB5LgB;AH41ItB;;A4B/rIA;EAiCM,OAAO;A5BkqIb;;A4BnsIA;;EAqCM,oBzBlMgB;AHq2ItB;;A4BxsIA;EAuCM,QAAQ;A5BqqId;;A4B5sIA;EA2CM,6BAA6B;E3BrD/B,c2BsD+B;EAC7B,YAAY;EACZ,UAAU;A5BqqIhB;;A4BntIA;EAgDM,kB1B5KU;AFm1IhB;;A4BvtIA;EAkDM,kB1BhLU;AFy1IhB;;A4B3tIA;EAoDM,iB1BnLS;AF81If;;A6Bj4IA,qBAAA;ACSA;EAGE,e5ByBW;E4BxBX,mBAAmB;A9B03IrB;;A8B93IA;EAMI,mBAAmB;EACnB,c5BM8B;E4BL9B,aAAa;EACb,uBAAuB;EACvB,iBAduC;A9B04I3C;;A8Bt4IA;EAYM,c5BfwB;AF64I9B;;A8B14IA;EAcI,mBAAmB;EACnB,aAAa;A9Bg4IjB;;A8B/4IA;E7BuII,e6BtHoC;A9Bk4IxC;;A8Bn5IA;EAoBQ,c5BvBsB;E4BwBtB,eAAe;EACf,oBAAoB;A9Bm4I5B;;A8Bz5IA;EAwBM,c5BxBwB;E4ByBxB,iBAAiB;A9Bq4IvB;;A8B95IA;;EA4BI,uBAAuB;EACvB,aAAa;EACb,eAAe;EACf,2BAA2B;A9Bu4I/B;;A8Bt6IA;E7BuII,mB6BrGuC;A9Bw4I3C;;A8B16IA;E7BuII,kB6BnGuC;A9B04I3C;;A8B96IA;;EAyCM,uBAAuB;A9B04I7B;;A8Bn7IA;;EA6CM,yBAAyB;A9B24I/B;;A8Bx7IA;EAgDI,kB5BnBY;AF+5IhB;;A8B57IA;EAkDI,kB5BvBY;AFq6IhB;;A8Bh8IA;EAoDI,iB5B1BW;AF06If;;A8Bp8IA;EAwDM,iBAAiB;A9Bg5IvB;;A8Bx8IA;EA2DM,iBAAiB;A9Bi5IvB;;A8B58IA;EA8DM,iBAAiB;A9Bk5IvB;;A8Bh9IA;EAiEM,iBAAiB;A9Bm5IvB;;A+Bx8IA;EACE,uB7BP6B;E6BQ7B,sBApBmB;EAqBnB,0F7BtB2B;E6BuB3B,c7BlB4B;E6BmB5B,eAAe;EACf,gBAvBoB;EAwBpB,kBAAkB;A/B28IpB;;A+Bz8IA;EACE,6BAzBwC;EA0BxC,oBAAoB;EACpB,kD7B/B2B;E6BgC3B,aAAa;A/B48If;;A+B18IA;EACE,mBAAmB;EACnB,c7BhC4B;E6BiC5B,aAAa;EACb,YAAY;EACZ,gB7BGe;E6BFf,qBAlCgC;A/B++IlC;;A+Bn9IA;EAQI,uBAAuB;A/B+8I3B;;A+B78IA;EACE,mBAAmB;EACnB,eAAe;EACf,aAAa;EACb,uBAAuB;EACvB,qBA3CgC;A/B2/IlC;;A+B98IA;EACE,cAAc;EACd,kBAAkB;A/Bi9IpB;;A+B/8IA;EACE,6BA9CyC;EA+CzC,eA9C2B;A/BggJ7B;;A+Bh9IA;EACE,6BA/CwC;EAgDxC,6B7BpD6B;E6BqD7B,oBAAoB;EACpB,aAAa;
A/Bm9If;;A+Bj9IA;EACE,mBAAmB;EACnB,aAAa;EACb,aAAa;EACb,YAAY;EACZ,cAAc;EACd,uBAAuB;EACvB,gBAzD2B;A/B6gJ7B;;A+B39IA;E9B6EI,+BCrI2B;AFuhJ/B;;A+Bl9IA;EAEI,qB7BlCkB;AFs/ItB;;AgCnhJA;EACE,oBAAoB;EACpB,kBAAkB;EAClB,mBAAmB;AhCshJrB;;AgCzhJA;EAOM,cAAc;AhCshJpB;;AgC7hJA;EAUM,UAAU;EACV,QAAQ;AhCuhJd;;AgCliJA;EAcM,YAAY;EACZ,mBA9BuB;EA+BvB,oBAAoB;EACpB,SAAS;AhCwhJf;;AgCthJA;EACE,aAAa;E/BiHX,O+BhHqB;EACvB,gBAzC6B;EA0C7B,gBAtC2B;EAuC3B,kBAAkB;EAClB,SAAS;EACT,WApCqB;AhC6jJvB;;AgCvhJA;EACE,uB9BjC6B;E8BkC7B,kB9BoBU;E8BnBV,0F9BhD2B;E8BiD3B,sBA9CsC;EA+CtC,mBA9CmC;AhCwkJrC;;AgB5jJgB;EgBqCd,c9BhD4B;E8BiD5B,cAAc;EACd,mBAAmB;EACnB,gBAAgB;EAChB,sBAAsB;EACtB,kBAAkB;AhC2hJpB;;AgCzhJA;;E/BkFI,mB+BhFmC;EACrC,mBAAmB;EACnB,mBAAmB;EACnB,WAAW;AhC4hJb;;AgCjiJA;;EAOI,4B9BxD0B;E8ByD1B,c9BpEyB;AFmmJ7B;;AgCviJA;;EAUI,yB9BlD8B;E8BmD9B,WrBSY;AXyhJhB;;AgChiJA;EACE,yB9BjE6B;E8BkE7B,YAAY;EACZ,cAAc;EACd,WAAW;EACX,gBAAgB;AhCmiJlB;;AiCjnJA;EAEE,mBAAmB;EACnB,8BAA8B;AjCmnJhC;;AiCtnJA;EAKI,kB/B8DQ;AFujJZ;;AiC1nJA;EAOI,qBAAqB;EACrB,mBAAmB;AjCunJvB;;AiC/nJA;EAWI,aAAa;AjCwnJjB;;AiCnoJA;;EAcM,aAAa;AjC0nJnB;;AiCxoJA;EAgBM,aAAa;AjC4nJnB;;AiC5oJA;EAmBQ,gBAAgB;EhC2HpB,qBgChJqC;AjCmpJzC;;AiCjpJA;EAsBQ,YAAY;AjC+nJpB;;AClkJE;EgCnFF;IAyBI,aAAa;EjCioJf;EiC1pJF;IA4BQ,YAAY;EjCioJlB;AACF;;AiChoJA;EACE,mBAAmB;EACnB,aAAa;EACb,gBAAgB;EAChB,YAAY;EACZ,cAAc;EACd,uBAAuB;AjCmoJzB;;AiCzoJA;;EASI,gBAAgB;AjCqoJpB;;AC7lJE;EgCjDF;IAaM,sBA7CmC;EjCmrJvC;AACF;;AiCroJA;;EAEE,gBAAgB;EAChB,YAAY;EACZ,cAAc;AjCwoJhB;;AiC5oJA;;EAQM,YAAY;AjCyoJlB;;AC3mJE;EgCtCF;;IhCiGI,qBgChJqC;EjCssJvC;AACF;;AiC1oJA;EACE,mBAAmB;EACnB,2BAA2B;AjC6oJ7B;;AC3nJE;EgCpBF;IAMM,kBAAkB;EjC8oJtB;AACF;;AC7nJE;EgCxBF;IAQI,aAAa;EjCkpJf;AACF;;AiCjpJA;EACE,mBAAmB;EACnB,yBAAyB;AjCopJ3B;;ACxoJE;EgCdF;IAKI,aAAa;EjCspJf;AACF;;AkC/tJA;EACE,uBAAuB;EACvB,aAAa;EACb,mBAAmB;AlCkuJrB;;AkCruJA;EAKI,sBAAsB;AlCouJ1B;;AkCzuJA;EAOI,8ChCD0B;EgCE1B,aAAa;EACb,oBAAoB;AlCsuJxB;;AkC/uJA;;EAYM,qBAAqB;AlCwuJ3B;;AkCpvJA;EAcM,mBAAmB;AlC0uJzB;;AkCxvJA;EAgBQ,kBAAkB;AlC4uJ1B;;AkC5vJA;EAkBI,8ChCZ0B;EgCa1B,gBAtBgB;E
AuBhB,iBAvBgB;AlCqwJpB;;AkClwJA;EAwBM,kBA1BsB;EA2BtB,mBA3BsB;AlCywJ5B;;AkC5uJA;;EAEE,gBAAgB;EAChB,YAAY;EACZ,cAAc;AlC+uJhB;;AkC7uJA;EjC2GI,kBiC/IgB;AlCqxJpB;;AkC9uJA;EjCwGI,iBiC/IgB;AlCyxJpB;;AkC/uJA;EACE,gBAAgB;EAChB,YAAY;EACZ,cAAc;EACd,mBAAmB;AlCkvJrB;;AChtJE;EiCtCF;IAQI,gBAAgB;ElCmvJlB;AACF;;AmCrxJA;EACE,ejCkBW;AFswJb;;AmCzxJA;EAII,kBjCgBY;AFywJhB;;AmC7xJA;EAMI,kBjCYY;AF+wJhB;;AmCjyJA;EAQI,iBjCSW;AFoxJf;;AmC3xJA;EACE,iBArB0B;AnCmzJ5B;;AmC/xJA;EAGI,kBjCqCc;EiCpCd,cjCzB0B;EiC0B1B,cAAc;EACd,qBAzBiC;AnCyzJrC;;AmCtyJA;EAQM,4BjCvBwB;EiCwBxB,cjC/BwB;AFi0J9B;;AmC3yJA;EAYM,yBjClB4B;EiCmB5B,WxByCU;AX0vJhB;;AmChzJA;ElCoHI,8BCtI0B;EiCmCxB,cAnC0B;ElCsI5B,oBkCrIkC;AnCu0JtC;;AmClyJA;EACE,cjCzC4B;EiC0C5B,iBApC2B;EAqC3B,qBApC+B;EAqC/B,yBAAyB;AnCqyJ3B;;AmCzyJA;EAMI,eAtCoB;AnC60JxB;;AmC7yJA;EAQI,kBAxCoB;AnCi1JxB;;AoC50JA;EAEE,4BlCV4B;EkCW5B,kBlC6CU;EkC5CV,elCYW;AFk0Jb;;AoCl1JA;EAMI,mBAAmB;ApCg1JvB;;AoCt1JA;EAQI,mBAAmB;EACnB,0BAA0B;ApCk1J9B;;AoC31JA;EAYI,kBlCKY;AF80JhB;;AoC/1JA;EAcI,kBlCCY;AFo1JhB;;AoCn2JA;EAgBI,iBlCFW;AFy1Jf;;AoCv2JA;EAsCM,uBAH+C;ApCw0JrD;;AoC32JA;EAwCQ,uBlC9CuB;EkC+CvB,clC5DqB;AFm4J7B;;AoCh3JA;EA2CQ,mBlCjDuB;AF03J/B;;AoCp3JA;EAsCM,yBAH+C;ApCq1JrD;;AoCx3JA;EAwCQ,yBlC3DqB;EkC4DrB,YlC/CuB;AFm4J/B;;AoC73JA;EA2CQ,qBlC9DqB;AFo5J7B;;AoCj4JA;EAsCM,yBAH+C;ApCk2JrD;;AoCr4JA;EAwCQ,4BlChDsB;EkCiDtB,yBzBkBa;AX+0JrB;;AoC14JA;EA2CQ,wBlCnDsB;AFs5J9B;;AoC94JA;EAsCM,yBAH+C;ApC+2JrD;;AoCl5JA;EAwCQ,yBlCvDsB;EkCwDtB,WzBoBQ;AX01JhB;;AoCv5JA;EA2CQ,qBlC1DsB;AF06J9B;;AoC35JA;EAsCM,yBzB8B0C;AX21JhD;;AoC/5JA;EAwCQ,yBlCzC0B;EkC0C1B,WzBoBQ;AXu2JhB;;AoCp6JA;EA2CQ,qBlC5C0B;EkC6C1B,czBiC6D;AX41JrE;;AoCz6JA;EAsCM,yBzB8B0C;AXy2JhD;;AoC76JA;EAwCQ,yBlCvC0B;EkCwC1B,WzBoBQ;AXq3JhB;;AoCl7JA;EA2CQ,qBlC1C0B;EkC2C1B,czBiC6D;AX02JrE;;AoCv7JA;EAsCM,yBzB8B0C;AXu3JhD;;AoC37JA;EAwCQ,yBlCxC0B;EkCyC1B,WzBoBQ;AXm4JhB;;AoCh8JA;EA2CQ,qBlC3C0B;EkC4C1B,czBiC6D;AXw3JrE;;AoCr8JA;EAsCM,yBzB8B0C;AXq4JhD;;AoCz8JA;EAwCQ,yBlC1C0B;EkC2C1B,WzBoBQ;AXi5JhB;;AoC98JA;EA2CQ,qBlC7C0B;EkC8C1B,czBiC6D;AXs4JrE;;AoCn9JA;EAsCM,yBzB8B0C;A
Xm5JhD;;AoCv9JA;EAwCQ,yBlC3C0B;EkC4C1B,yBzBkBa;AXi6JrB;;AoC59JA;EA2CQ,qBlC9C0B;EkC+C1B,czBiC6D;AXo5JrE;;AoCj+JA;EAsCM,yBzB8B0C;AXi6JhD;;AoCr+JA;EAwCQ,yBlCrCyB;EkCsCzB,WzBoBQ;AX66JhB;;AoC1+JA;EA2CQ,qBlCxCyB;EkCyCzB,czBiC6D;AXk6JrE;;AoCj8JA;EACE,mBAAmB;EACnB,yBlC9D4B;EkC+D5B,0BAAgE;EAChE,WzBWc;EyBVd,aAAa;EACb,gBlC7Be;EkC8Bf,8BAA8B;EAC9B,iBAAiB;EACjB,mBAtEiC;EAuEjC,kBAAkB;ApCo8JpB;;AoC98JA;EAYI,YAAY;EACZ,cAAc;EnCgEd,mBmC/DsC;ApCs8J1C;;AoCp9JA;EAgBI,eAjEgC;EAkEhC,yBAAyB;EACzB,0BAA0B;ApCw8J9B;;AoCt8JA;EACE,qBlC9E4B;EkC+E5B,kBlCpBU;EkCqBV,mBAAmB;EACnB,uBAjFmC;EAkFnC,clCrF4B;EkCsF5B,qBAjFiC;ApC0hKnC;;AoC/8JA;;EASI,uBlCjF2B;AF4hK/B;;AoCp9JA;EAWI,6BAlFgD;ApC+hKpD;;AqC/gKA;EAEE,mBAAmB;EACnB,aAAa;EACb,sBAAsB;EACtB,uBAAuB;EACvB,gBAAgB;EAChB,eAAe;EACf,WAxCU;ArCyjKZ;;AqCzhKA;EAWI,aAAa;ArCkhKjB;;AqChhKA;EAEE,wCnC7C2B;AF+jK7B;;AqChhKA;;EAEE,cA9CgC;EA+ChC,+BAA0D;EAC1D,cAAc;EACd,kBAAkB;EAClB,WAAW;ArCmhKb;;ACjgKE;EoCxBF;;IASI,cAAc;IACd,8BAA0D;IAC1D,YAxDuB;ErC8kKzB;AACF;;AqCrhKA;EAEE,gBAAgB;EAChB,YAxD2B;EAyD3B,eAAe;EpCsFb,WoC9IoB;EA0DtB,SAzDoB;EA0DpB,WA5D2B;ArCmlK7B;;AqCrhKA;EACE,aAAa;EACb,sBAAsB;EACtB,8BAAgD;EAChD,gBAAgB;EAChB,uBAAuB;ArCwhKzB;;AqCthKA;;EAEE,mBAAmB;EACnB,4BnCpE4B;EmCqE5B,aAAa;EACb,cAAc;EACd,2BAA2B;EAC3B,aApE4B;EAqE5B,kBAAkB;ArCyhKpB;;AqCvhKA;EACE,gCnC/E4B;EmCgF5B,2BnCpBgB;EmCqBhB,4BnCrBgB;AF+iKlB;;AqCxhKA;EACE,cnCxF4B;EmCyF5B,YAAY;EACZ,cAAc;EACd,iBnC9Da;EmC+Db,cA7E8B;ArCwmKhC;;AqCzhKA;EACE,8BnC/BgB;EmCgChB,+BnChCgB;EmCiChB,6BnC7F4B;AFynK9B;;AqC/hKA;EpC4CI,mBoCtCuC;ArC6hK3C;;AqC3hKA;EpC9CE,iCAAiC;EoCgDjC,uBnC/F6B;EmCgG7B,YAAY;EACZ,cAAc;EACd,cAAc;EACd,aAtF4B;ArConK9B;;AsCxlKA;EACE,uBpC1C6B;EoC2C7B,mBAvDqB;EAwDrB,kBAAkB;EAClB,WAtDW;AtCipKb;;AsC/lKA;EASM,uBpClDyB;EoCmDzB,cpChEuB;AF0pK7B;;AsCpmKA;;EAcU,cpCpEmB;AF+pK7B;;AsCzmKA;;;;EAoBY,yB3BiCqB;E2BhCrB,cpC3EiB;AFuqK7B;;AsCjnKA;EAwBY,qBpC9EiB;AF2qK7B;;AsCrnKA;EA0BQ,cpChFqB;AF+qK7B;;ACxmKE;EqCjBF;;;;IAgCY,cpCtFiB;EFurK3B;EsCjoKF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,cpC7Fe;EFosK3B;EsC9oKF;;IA0Cc,qBpChGe;EFwsK3B;EsClpKF;;;IA8CU,yB3BOu
B;I2BNvB,cpCrGmB;EF8sK3B;EsCxpKF;IAmDc,uBpC5FiB;IoC6FjB,cpC1Ge;EFktK3B;AACF;;AsC7pKA;EASM,yBpC/DuB;EoCgEvB,YpCnDyB;AF2sK/B;;AsClqKA;;EAcU,YpCvDqB;AFgtK/B;;AsCvqKA;;;;EAoBY,uB3BiCqB;E2BhCrB,YpC9DmB;AFwtK/B;;AsC/qKA;EAwBY,mBpCjEmB;AF4tK/B;;AsCnrKA;EA0BQ,YpCnEuB;AFguK/B;;ACtqKE;EqCjBF;;;;IAgCY,YpCzEmB;EFwuK7B;EsC/rKF;;;;;;;;;;IAsCc,uB3BemB;I2BdnB,YpChFiB;EFqvK7B;EsC5sKF;;IA0Cc,mBpCnFiB;EFyvK7B;EsChtKF;;;IA8CU,uB3BOuB;I2BNvB,YpCxFqB;EF+vK7B;EsCttKF;IAmDc,yBpCzGe;IoC0Gf,YpC7FiB;EFmwK7B;AACF;;AsC3tKA;EASM,4BpCpDwB;EoCqDxB,yB3Bce;AXwsKrB;;AsChuKA;;EAcU,yB3BUW;AX6sKrB;;AsCruKA;;;;EAoBY,yB3BiCqB;E2BhCrB,yB3BGS;AXqtKrB;;AsC7uKA;EAwBY,gC3BAS;AXytKrB;;AsCjvKA;EA0BQ,yB3BFa;AX6tKrB;;ACpuKE;EqCjBF;;;;IAgCY,yB3BRS;EXquKnB;EsC7vKF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,yB3BfO;EXkvKnB;EsC1wKF;;IA0Cc,gC3BlBO;EXsvKnB;EsC9wKF;;;IA8CU,yB3BOuB;I2BNvB,yB3BvBW;EX4vKnB;EsCpxKF;IAmDc,4BpC9FgB;IoC+FhB,yB3B5BO;EXgwKnB;AACF;;AsCzxKA;EASM,yBpC3DwB;EoC4DxB,W3BgBU;AXowKhB;;AsC9xKA;;EAcU,W3BYM;AXywKhB;;AsCnyKA;;;;EAoBY,yB3BiCqB;E2BhCrB,W3BKI;AXixKhB;;AsC3yKA;EAwBY,kB3BEI;AXqxKhB;;AsC/yKA;EA0BQ,W3BAQ;AXyxKhB;;AClyKE;EqCjBF;;;;IAgCY,W3BNI;EXiyKd;EsC3zKF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,W3BbE;EX8yKd;EsCx0KF;;IA0Cc,kB3BhBE;EXkzKd;EsC50KF;;;IA8CU,yB3BOuB;I2BNvB,W3BrBM;EXwzKd;EsCl1KF;IAmDc,yBpCrGgB;IoCsGhB,W3B1BE;EX4zKd;AACF;;AsCv1KA;EASM,yBpC7C4B;EoC8C5B,W3BgBU;AXk0KhB;;AsC51KA;;EAcU,W3BYM;AXu0KhB;;AsCj2KA;;;;EAoBY,yB3BiCqB;E2BhCrB,W3BKI;AX+0KhB;;AsCz2KA;EAwBY,kB3BEI;AXm1KhB;;AsC72KA;EA0BQ,W3BAQ;AXu1KhB;;ACh2KE;EqCjBF;;;;IAgCY,W3BNI;EX+1Kd;EsCz3KF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,W3BbE;EX42Kd;EsCt4KF;;IA0Cc,kB3BhBE;EXg3Kd;EsC14KF;;;IA8CU,yB3BOuB;I2BNvB,W3BrBM;EXs3Kd;EsCh5KF;IAmDc,yBpCvFoB;IoCwFpB,W3B1BE;EX03Kd;AACF;;AsCr5KA;EASM,yBpC3C4B;EoC4C5B,W3BgBU;AXg4KhB;;AsC15KA;;EAcU,W3BYM;AXq4KhB;;AsC/5KA;;;;EAoBY,yB3BiCqB;E2BhCrB,W3BKI;AX64KhB;;AsCv6KA;EAwBY,kB3BEI;AXi5KhB;;AsC36KA;EA0BQ,W3BAQ;AXq5KhB;;AC95KE;EqCjBF;;;;IAgCY,W3BNI;EX65Kd;EsCv7KF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,W3BbE;EX06Kd;EsCp8KF;;IA0Cc,kB3BhBE;EX86Kd;EsCx8KF;
;;IA8CU,yB3BOuB;I2BNvB,W3BrBM;EXo7Kd;EsC98KF;IAmDc,yBpCrFoB;IoCsFpB,W3B1BE;EXw7Kd;AACF;;AsCn9KA;EASM,yBpC5C4B;EoC6C5B,W3BgBU;AX87KhB;;AsCx9KA;;EAcU,W3BYM;AXm8KhB;;AsC79KA;;;;EAoBY,yB3BiCqB;E2BhCrB,W3BKI;AX28KhB;;AsCr+KA;EAwBY,kB3BEI;AX+8KhB;;AsCz+KA;EA0BQ,W3BAQ;AXm9KhB;;AC59KE;EqCjBF;;;;IAgCY,W3BNI;EX29Kd;EsCr/KF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,W3BbE;EXw+Kd;EsClgLF;;IA0Cc,kB3BhBE;EX4+Kd;EsCtgLF;;;IA8CU,yB3BOuB;I2BNvB,W3BrBM;EXk/Kd;EsC5gLF;IAmDc,yBpCtFoB;IoCuFpB,W3B1BE;EXs/Kd;AACF;;AsCjhLA;EASM,yBpC9C4B;EoC+C5B,W3BgBU;AX4/KhB;;AsCthLA;;EAcU,W3BYM;AXigLhB;;AsC3hLA;;;;EAoBY,yB3BiCqB;E2BhCrB,W3BKI;AXygLhB;;AsCniLA;EAwBY,kB3BEI;AX6gLhB;;AsCviLA;EA0BQ,W3BAQ;AXihLhB;;AC1hLE;EqCjBF;;;;IAgCY,W3BNI;EXyhLd;EsCnjLF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,W3BbE;EXsiLd;EsChkLF;;IA0Cc,kB3BhBE;EX0iLd;EsCpkLF;;;IA8CU,yB3BOuB;I2BNvB,W3BrBM;EXgjLd;EsC1kLF;IAmDc,yBpCxFoB;IoCyFpB,W3B1BE;EXojLd;AACF;;AsC/kLA;EASM,yBpC/C4B;EoCgD5B,yB3Bce;AX4jLrB;;AsCplLA;;EAcU,yB3BUW;AXikLrB;;AsCzlLA;;;;EAoBY,yB3BiCqB;E2BhCrB,yB3BGS;AXykLrB;;AsCjmLA;EAwBY,gC3BAS;AX6kLrB;;AsCrmLA;EA0BQ,yB3BFa;AXilLrB;;ACxlLE;EqCjBF;;;;IAgCY,yB3BRS;EXylLnB;EsCjnLF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,yB3BfO;EXsmLnB;EsC9nLF;;IA0Cc,gC3BlBO;EX0mLnB;EsCloLF;;;IA8CU,yB3BOuB;I2BNvB,yB3BvBW;EXgnLnB;EsCxoLF;IAmDc,yBpCzFoB;IoC0FpB,yB3B5BO;EXonLnB;AACF;;AsC7oLA;EASM,yBpCzC2B;EoC0C3B,W3BgBU;AXwnLhB;;AsClpLA;;EAcU,W3BYM;AX6nLhB;;AsCvpLA;;;;EAoBY,yB3BiCqB;E2BhCrB,W3BKI;AXqoLhB;;AsC/pLA;EAwBY,kB3BEI;AXyoLhB;;AsCnqLA;EA0BQ,W3BAQ;AX6oLhB;;ACtpLE;EqCjBF;;;;IAgCY,W3BNI;EXqpLd;EsC/qLF;;;;;;;;;;IAsCc,yB3BemB;I2BdnB,W3BbE;EXkqLd;EsC5rLF;;IA0Cc,kB3BhBE;EXsqLd;EsChsLF;;;IA8CU,yB3BOuB;I2BNvB,W3BrBM;EX4qLd;EsCtsLF;IAmDc,yBpCnFmB;IoCoFnB,W3B1BE;EXgrLd;AACF;;AsC3sLA;EAsDI,oBAAoB;EACpB,aAAa;EACb,mBA7GmB;EA8GnB,WAAW;AtCypLf;;AsCltLA;EA2DI,gCpCtG0B;AFiwL9B;;AsCttLA;EALE,OAAO;EACP,eAAe;EACf,QAAQ;EACR,WA/CiB;AtC8wLnB;;AsC7tLA;EAgEI,SAAS;AtCiqLb;;AsCjuLA;EAkEM,iCpC7GwB;AFgxL9B;;AsCruLA;EAoEI,MAAM;AtCqqLV;;AsCnqLA;;EAGI,oBA9HmB;AtCmyLvB;;AsCxqLA;;EAKI,uBAhImB;AtCwyLvB;;AsCtqL
A;;EAEE,oBAAoB;EACpB,aAAa;EACb,cAAc;EACd,mBAvIqB;AtCgzLvB;;AsCvqLA;EAIM,6BAA6B;AtCuqLnC;;AsCrqLA;ErCpFE,iCAAiC;EqCsFjC,gBAAgB;EAChB,gBAAgB;EAChB,kBAAkB;AtCwqLpB;;AsCtqLA;EACE,cpClJ4B;EDoB5B,eAAe;EACf,cAAc;EACd,eqC1BqB;ErC2BrB,kBAAkB;EAClB,cqC5BqB;ErC6InB,iBqCWkC;AtC6qLtC;;ACxyLE;EACE,8BAA8B;EAC9B,cAAc;EACd,WAAW;EACX,qBAAqB;EACrB,kBAAkB;EAClB,wBAAwB;EACxB,yBCiCQ;EDhCR,yDAAyD;EACzD,oCC0Ba;EDzBb,WAAW;AD2yLf;;AC1yLI;EACE,oBAAoB;AD6yL1B;;AC5yLI;EACE,oBAAoB;AD+yL1B;;AC9yLI;EACE,oBAAoB;ADizL1B;;AChzLE;EACE,qCAAiC;ADmzLrC;;AC/yLM;EACE,wCAAwC;ADkzLhD;;ACjzLM;EACE,UAAU;ADozLlB;;ACnzLM;EACE,0CAA0C;ADszLlD;;AsCptLA;EACE,aAAa;AtCutLf;;AsCrtLA;;EAEE,cpC3J4B;EoC4J5B,cAAc;EACd,gBAAgB;EAChB,uBAAuB;EACvB,kBAAkB;AtCwtLpB;;AsC9tLA;;EASM,qBAAqB;EACrB,sBAAsB;AtC0tL5B;;AsCxtLA;;EAEE,eAAe;AtC2tLjB;;AsC7tLA;;;;;EAOI,yBpCrK0B;EoCsK1B,cpC9J8B;AF43LlC;;AsC5tLA;EACE,YAAY;EACZ,cAAc;AtC+tLhB;;AsCjuLA;EAII,mBA5KgC;AtC64LpC;;AsCruLA;EAMI,UAAU;AtCmuLd;;AsCzuLA;EAQI,YAAY;EACZ,cAAc;AtCquLlB;;AsC9uLA;EAWI,oCAAoC;EACpC,mBA/LmB;EAgMnB,kCAAkC;AtCuuLtC;;AsCpvLA;EAgBM,6BApLyC;EAqLzC,4BpCjL4B;AFy5LlC;;AsCzvLA;EAmBM,6BApL0C;EAqL1C,4BpCpL4B;EoCqL5B,0BApLuC;EAqLvC,wBApLqC;EAqLrC,cpCvL4B;EoCwL5B,kCAAwE;AtC0uL9E;;AsCxuLA;EACE,YAAY;EACZ,cAAc;AtC2uLhB;;AsCzuLA;ErCpEI,oBqCqEoC;AtC4uLxC;;AsC7uLA;EAII,qBpClM8B;EoCmM9B,oBAAoB;ErCjEpB,cqCkE6B;AtC6uLjC;;AsC3uLA;EACE,mBAAmB;EACnB,sBAAsB;EACtB,mBAAmB;AtC8uLrB;;AsCjvLA;EAKI,oBAAoB;EACpB,qBAAqB;AtCgvLzB;;AsC9uLA;EACE,4BpCxN4B;EoCyN5B,YAAY;EACZ,aAAa;EACb,WA9LyB;EA+LzB,gBAAgB;AtCivLlB;;AC74LE;EqCrBF;IAqLI,cAAc;EtCkvLhB;EsCjvLA;;IAGI,mBAAmB;IACnB,aAAa;EtCkvLjB;EsCjvLA;IAEI,aAAa;EtCkvLjB;EsC10LF;IA0FI,uBpCxO2B;IoCyO3B,4CpCtPyB;IoCuPzB,iBAAiB;EtCmvLnB;EsCtvLA;IAKI,cAAc;EtCovLlB;EsClvLA;IA1MA,OAAO;IACP,eAAe;IACf,QAAQ;IACR,WA/CiB;EtC8+LjB;EsCxvLA;IAKI,SAAS;EtCsvLb;EsC3vLA;IAOM,4CpClQqB;EFy/L3B;EsC9vLA;IASI,MAAM;EtCwvLV;EsCjwLA;IrC/LA,iCAAiC;IqC6M3B,iCAA2C;IAC3C,cAAc;EtCuvLpB;EsCtvLA;;IAGI,oBA7QiB;EtCogMrB;EsC1vLA;;IAKI,uBA/QiB;EtCwgMrB;AACF;;ACn8LE;EqC4MA;;;;IAIE,oBAAoB;IACpB,aAAa;
EtC2vLf;EsC79LF;IAoOI,mBAzRmB;EtCqhMrB;EsC7vLA;IAGI,kBAzR0B;EtCshM9B;EsChwLA;;IAMM,mBAAmB;EtC8vLzB;EsCpwLA;;IASM,kBpC/NI;EF89LV;EsCxwLA;;;;IAgBQ,wCAAwC;EtC8vLhD;EsC9wLA;IAuBU,wCAAwC;EtC0vLlD;EsCjxLA;IA4BU,4BpC1SkB;IoC2SlB,cpCtTiB;EF8iM3B;EsCrxLA;IA+BU,4BpC7SkB;IoC8SlB,cpCrSsB;EF8hMhC;EsC55LF;IAqKI,aAAa;EtC0vLf;EsCv5LF;;IAgKI,mBAAmB;IACnB,aAAa;EtC2vLf;EsCt4LF;IA8IM,oBAAoB;EtC2vLxB;EsC7vLA;IAKM,oDAAoD;EtC2vL1D;EsChwLA;IAOM,gCpC/TsB;IoCgUtB,0BAAkE;IAClE,gBAAgB;IAChB,YAAY;IACZ,4CpC3UqB;IoC4UrB,SAAS;EtC4vLf;EsCxwLA;IAkBM,cAAc;EtCyvLpB;EsCxvLM;IAEE,UAAU;IACV,oBAAoB;IACpB,wBAAwB;EtCyvLhC;EsCr7LF;IA8LI,YAAY;IACZ,cAAc;EtC0vLhB;EsCzvLA;IACE,2BAA2B;IrC9M3B,kBqC+MoC;EtC2vLtC;EsC1vLA;IACE,yBAAyB;IrCjNzB,iBqCkNoC;EtC4vLtC;EsCl4LF;IAwII,uBpCrV2B;IoCsV3B,8BpC/Rc;IoCgSd,+BpChSc;IoCiSd,6BpC7V0B;IoC8V1B,2CpCtWyB;IoCuWzB,aAAa;IACb,mBAAmB;IrClNnB,OqCmNuB;IACvB,eAAe;IACf,kBAAkB;IAClB,SAAS;IACT,WAhVkB;EtC6kMpB;EsCh5LF;IAqJM,sBAAsB;IACtB,mBAAmB;EtC8vLvB;EsC7wLA;IrCnNE,mBqCoOuC;EtC+vLzC;EsChxLA;IAoBM,4BpC1WsB;IoC2WtB,cpCtXqB;EFqnM3B;EsCpxLA;IAuBM,4BpC7WsB;IoC8WtB,cpCrW0B;EFqmMhC;EsC/vLE;IAEE,kBpCxTY;IoCyTZ,gBAAgB;IAChB,4EpC9XuB;IoC+XvB,cAAc;IACd,UAAU;IACV,oBAAoB;IACpB,wBAA8C;IAC9C,2BAA2B;IAC3B,yBpC9TM;IoC+TN,uCAAuC;EtCgwL3C;EsCpyLA;IAsCI,UAAU;IACV,QAAQ;EtCiwLZ;EsCv6LF;IAwKI,cAAc;EtCkwLhB;EsCjwLA;;IrC7PE,qBqCgQyC;EtCkwL3C;EsCrwLA;;IrC7PE,sBqCkQyC;EtCowL3C;EsClwLA;IAjWA,OAAO;IACP,eAAe;IACf,QAAQ;IACR,WA/CiB;EtCqpMjB;EsCxwLA;IAKI,SAAS;EtCswLb;EsC3wLA;IAOM,4CpCzZqB;EFgqM3B;EsC9wLA;IASI,MAAM;EtCwwLV;EsCvwLA;;IAGI,oBA9ZiB;EtCsqMrB;EsC3wLA;;IAKI,uBAhaiB;EtC0qMrB;EsC/wLA;;IAOI,oBAA4D;EtC4wLhE;EsCnxLA;;IASI,uBAA+D;EtC8wLnE;EsC5wLA;;IAGI,cpC1auB;EFurM3B;EsChxLA;;IAKI,6BAja2C;EtCgrM/C;EsC9wLA;IAKM,yBpCtasB;EFkrM5B;AACF;;AsCzwLA;EAEI,iCAA2C;AtC2wL/C;;AuCtqMA;EAEE,erCIW;EqCHX,gBAhC0B;AvCwsM5B;;AuC3qMA;EAMI,kBrCCY;AFwqMhB;;AuC/qMA;EAQI,kBrCHY;AF8qMhB;;AuCnrMA;EAUI,iBrCNW;AFmrMf;;AuCvrMA;;EAcM,iBAAiB;EACjB,kBAAkB;EAClB,uBrCwBmB;AFspMzB;;AuC9rMA;EAkBM,uBrCsBmB;AF0pMzB;;AuC9qMA;;EAEE,mBAAmB;EACnB,aAAa;EACb,u
BAAuB;EACvB,kBAAkB;AvCirMpB;;AuC/qMA;;;;EAME,cA3D6B;EA4D7B,uBAAuB;EACvB,eA5D8B;EA6D9B,mBA5DkC;EA6DlC,oBA5DmC;EA6DnC,kBAAkB;AvCgrMpB;;AuC9qMA;;;EAGE,qBrChE4B;EqCiE5B,crCrE4B;EqCsE5B,gBpCvEoB;AHwvMtB;;AuCtrMA;;;EAOI,qBrCrE0B;EqCsE1B,crCzE0B;AF8vM9B;;AuC7rMA;;;EAUI,qBrC3D8B;AFovMlC;;AuCnsMA;;;EAYI,iDrCjFyB;AF8wM7B;;AuCzsMA;;;EAcI,yBrC3E0B;EqC4E1B,qBrC5E0B;EqC6E1B,gBAAgB;EAChB,crChF0B;EqCiF1B,YAAY;AvCisMhB;;AuC/rMA;;EAEE,oBAAoB;EACpB,qBAAqB;EACrB,mBAAmB;AvCksMrB;;AuChsMA;EAEI,yBrC7E8B;EqC8E9B,qBrC9E8B;EqC+E9B,W5BnBY;AXqtMhB;;AuChsMA;EACE,crC/F4B;EqCgG5B,oBAAoB;AvCmsMtB;;AuCjsMA;EACE,eAAe;AvCosMjB;;AC/tME;EsClDF;IAiFI,eAAe;EvCqsMjB;EuC1tMF;;IAwBI,YAAY;IACZ,cAAc;EvCssMhB;EuCrsMA;IAEI,YAAY;IACZ,cAAc;EvCssMlB;AACF;;AC1uME;EsCsBF;IAiBI,YAAY;IACZ,cAAc;IACd,2BAA2B;IAC3B,QAAQ;EvCwsMV;EuCvsMA;IACE,QAAQ;EvCysMV;EuCxsMA;IACE,QAAQ;EvC0sMV;EuC9yMF;IAsGI,8BAA8B;EvC2sMhC;EuC5sMA;IAIM,QAAQ;EvC2sMd;EuC/sMA;IAMM,uBAAuB;IACvB,QAAQ;EvC4sMd;EuCntMA;IASM,QAAQ;EvC6sMd;EuCttMA;IAYM,QAAQ;EvC6sMd;EuCztMA;IAcM,QAAQ;EvC8sMd;EuC5tMA;IAgBM,yBAAyB;IACzB,QAAQ;EvC+sMd;AACF;;AwCv0MA;EACE,kBtCuCgB;EsCtChB,0FtC9B2B;EsC+B3B,etCIW;AFs0Mb;;AwC70MA;EAKI,qBtCakB;AF+zMtB;;AwCj1MA;EAYQ,uBtC3BuB;EsC4BvB,ctCzCqB;AFk3M7B;;AwCt1MA;EAeQ,0BtC9BuB;AFy2M/B;;AwC11MA;EAiBQ,YtChCuB;AF62M/B;;AwC91MA;EAYQ,yBtCxCqB;EsCyCrB,YtC5BuB;AFk3M/B;;AwCn2MA;EAeQ,4BtC3CqB;AFm4M7B;;AwCv2MA;EAiBQ,ctC7CqB;AFu4M7B;;AwC32MA;EAYQ,4BtC7BsB;EsC8BtB,yB7BqCa;AX8zMrB;;AwCh3MA;EAeQ,+BtChCsB;AFq4M9B;;AwCp3MA;EAiBQ,iBtClCsB;AFy4M9B;;AwCx3MA;EAYQ,yBtCpCsB;EsCqCtB,W7BuCQ;AXy0MhB;;AwC73MA;EAeQ,4BtCvCsB;AFy5M9B;;AwCj4MA;EAiBQ,ctCzCsB;AF65M9B;;AwCr4MA;EAYQ,yBtCtB0B;EsCuB1B,W7BuCQ;AXs1MhB;;AwC14MA;EAeQ,4BtCzB0B;AFw5MlC;;AwC94MA;EAiBQ,ctC3B0B;AF45MlC;;AwCl5MA;EAYQ,yBtCpB0B;EsCqB1B,W7BuCQ;AXm2MhB;;AwCv5MA;EAeQ,4BtCvB0B;AFm6MlC;;AwC35MA;EAiBQ,ctCzB0B;AFu6MlC;;AwC/5MA;EAYQ,yBtCrB0B;EsCsB1B,W7BuCQ;AXg3MhB;;AwCp6MA;EAeQ,4BtCxB0B;AFi7MlC;;AwCx6MA;EAiBQ,ctC1B0B;AFq7MlC;;AwC56MA;EAYQ,yBtCvB0B;EsCwB1B,W7BuCQ;AX63MhB;;AwCj7MA;EAeQ,4BtC1B0B;AFg8MlC;;AwCr7MA;EAiBQ,ctC5
B0B;AFo8MlC;;AwCz7MA;EAYQ,yBtCxB0B;EsCyB1B,yB7BqCa;AX44MrB;;AwC97MA;EAeQ,4BtC3B0B;AF88MlC;;AwCl8MA;EAiBQ,ctC7B0B;AFk9MlC;;AwCt8MA;EAYQ,yBtClByB;EsCmBzB,W7BuCQ;AXu5MhB;;AwC38MA;EAeQ,4BtCrByB;AFq9MjC;;AwC/8MA;EAiBQ,ctCvByB;AFy9MjC;;AwCh8MA;;EAGI,gCtCzC2B;AF2+M/B;;AwCh8MA;EACE,yBtC5C6B;EsC6C7B,0BAA8C;EAC9C,ctCnD4B;EsCoD5B,iBAhDyB;EAiDzB,gBtCfe;EsCgBf,iBArD8B;EAsD9B,mBArDgC;AxCw/MlC;;AwCj8MA;EACE,qBAAqB;EACrB,aAAa;EACb,kBArD4B;EAsD5B,uBAAuB;AxCo8MzB;;AwCx8MA;EAMI,gCtC3D0B;EsC4D1B,mBAAmB;EACnB,cAAc;AxCs8MlB;;AwC98MA;EAWM,4BtCnEwB;EsCoExB,ctCrEwB;AF4gN9B;;AwCr8MA;EAEI,ctCxE0B;AF+gN9B;;AwCz8MA;EAIM,ctC3D4B;AFogNlC;;AwCv8MA;EACE,mBAAmB;EACnB,ctC/E4B;EsCgF5B,aAAa;EACb,2BAA2B;EAC3B,qBAAqB;AxC08MvB;;AwC/8MA;EvC6DI,oBuCtDsC;AxC48M1C;;AwCn9MA;EASI,YAAY;EACZ,cAAc;EACd,WAAW;AxC88Mf;;AwCz9MA;EAaI,eAAe;AxCg9MnB;;AwC79MA;EAeI,0BtC5E8B;EsC6E9B,ctC7F0B;AF+iN9B;;AwCl+MA;EAkBM,ctC/E4B;AFmiNlC;;AwCt+MA;EAoBI,8BtCjCc;EsCkCd,+BtClCc;AFw/MlB;;AwCp9MA;;EAEE,eAAe;AxCu9MjB;;AwCz9MA;;EAII,4BtCjG0B;AF2jN9B;;AwCx9MA;EvC9FE,qBAAqB;EACrB,euC8FgB;EvC7FhB,WuC6FqB;EvC5FrB,gBuC4FqB;EvC3FrB,kBAAkB;EAClB,mBAAmB;EACnB,UuCyFqB;EACrB,ctC1G4B;EDwI1B,oBuC7BoC;AxCi+MxC;;AwCp+MA;EAKI,kBAAkB;EAClB,oBAAoB;AxCm+MxB;;AyC7jNA;ExCkCE,iCAAiC;EwC9BjC,oBAAoB;EACpB,aAAa;EACb,evCGW;EuCFX,8BAA8B;EAC9B,gBAAgB;EAChB,gBAAgB;EAChB,mBAAmB;AzC8jNrB;;AyCxkNA;EAYI,mBAAmB;EACnB,4BvC/B0B;EuCgC1B,0BAzC4B;EA0C5B,wBAzC0B;EA0C1B,cvCrC0B;EuCsC1B,aAAa;EACb,uBAAuB;EACvB,mBAA6C;EAC7C,kBAxCyB;EAyCzB,mBAAmB;AzCgkNvB;;AyCrlNA;EAuBM,4BvC7CwB;EuC8CxB,cvC9CwB;AFgnN9B;;AyC1lNA;EA0BI,cAAc;AzCokNlB;;AyC9lNA;EA6BQ,4BvCnC0B;EuCoC1B,cvCpC0B;AFymNlC;;AyCnmNA;EAgCI,mBAAmB;EACnB,4BvCnD0B;EuCoD1B,0BA7D4B;EA8D5B,wBA7D0B;EA8D1B,aAAa;EACb,YAAY;EACZ,cAAc;EACd,2BAA2B;AzCukN/B;;AyC9mNA;EAyCM,qBAAqB;AzCykN3B;;AyClnNA;EA2CM,UAAU;EACV,uBAAuB;EACvB,oBAAoB;EACpB,qBAAqB;AzC2kN3B;;AyCznNA;EAgDM,yBAAyB;EACzB,oBAAoB;AzC6kN1B;;AyC9nNA;ExCoHI,mBwChEuC;AzC8kN3C;;AyCloNA;ExCoHI,kBwC9DuC;AzCglN3C;;AyCtoNA;EA0DM,uBAAuB;AzCglN7B;;AyC1oNA;EA6DM,yBAAyB;AzCilN/B;;AyC9oNA;EAiEM,6BAA
6B;EAE3B,0BAAkE;AzCglN1E;;AyCnpNA;EAuEQ,4BvCtFsB;EuCuFtB,4BvC1FsB;AF0qN9B;;AyCxpNA;EA4EU,uBvCzFqB;EuC0FrB,qBvC/FoB;EuCgGpB,2CAA2E;AzCglNrF;;AyC9pNA;EAiFM,YAAY;EACZ,cAAc;AzCilNpB;;AyCnqNA;EAqFM,qBvCvGwB;EuCwGxB,mBA/F+B;EAgG/B,iBA/F6B;EAgG7B,gBAAgB;EAChB,kBAAkB;AzCklNxB;;AyC3qNA;EA2FQ,4BvC1GsB;EuC2GtB,qBvC/GsB;EuCgHtB,UAAU;AzColNlB;;AyCjrNA;ExCoHI,iBwCpBuE;AzCqlN3E;;AyCrrNA;EAmGU,2BvC1DE;EuC2DF,8BvC3DE;AFipNZ;;AyC1rNA;EA0GU,4BvCjEE;EuCkEF,+BvClEE;AFspNZ;;AyC/rNA;EAiHU,yBvCvHwB;EuCwHxB,qBvCxHwB;EuCyHxB,W9B7DM;E8B8DN,UAAU;AzCklNpB;;AyCtsNA;EAsHM,mBAAmB;AzColNzB;;AyC1sNA;EA2HY,mCvChFa;EuCiFb,gCvCjFa;EuCkFb,oBAAoB;AzCmlNhC;;AyChtNA;EAoIY,oCvCzFa;EuC0Fb,iCvC1Fa;EuC2Fb,qBAAqB;AzCglNjC;;AyCttNA;EA6II,kBvCnIY;AFgtNhB;;AyC1tNA;EA+II,kBvCvIY;AFstNhB;;AyC9tNA;EAiJI,iBvC1IW;AF2tNf;;A0C9vNA,eAAA;ACEA;EACE,cAAc;EACd,aAAa;EACb,YAAY;EACZ,cAAc;EACd,gBAPkB;A3CuwNpB;;A2C/vNE;EACE,UAAU;A3CkwNd;;A2CjwNE;EACE,UAAU;EACV,WAAW;A3CowNf;;A2CnwNE;EACE,UAAU;EACV,UAAU;A3CswNd;;A2CrwNE;EACE,UAAU;EACV,eAAe;A3CwwNnB;;A2CvwNE;EACE,UAAU;EACV,UAAU;A3C0wNd;;A2CzwNE;EACE,UAAU;EACV,eAAe;A3C4wNnB;;A2C3wNE;EACE,UAAU;EACV,UAAU;A3C8wNd;;A2C7wNE;EACE,UAAU;EACV,UAAU;A3CgxNd;;A2C/wNE;EACE,UAAU;EACV,UAAU;A3CkxNd;;A2CjxNE;EACE,UAAU;EACV,UAAU;A3CoxNd;;A2CnxNE;EACE,UAAU;EACV,UAAU;A3CsxNd;;A2CrxNE;E1CwGE,gB0CvGmC;A3CwxNvC;;A2CvxNE;E1CsGE,qB0CrGwC;A3C0xN5C;;A2CzxNE;E1CoGE,gB0CnGmC;A3C4xNvC;;A2C3xNE;E1CkGE,qB0CjGwC;A3C8xN5C;;A2C7xNE;E1CgGE,gB0C/FmC;A3CgyNvC;;A2C/xNE;E1C8FE,gB0C7FmC;A3CkyNvC;;A2CjyNE;E1C4FE,gB0C3FmC;A3CoyNvC;;A2CnyNE;E1C0FE,gB0CzFmC;A3CsyNvC;;A2CryNE;E1CwFE,gB0CvFmC;A3CwyNvC;;A2CtyNI;EACE,UAAU;EACV,SAA0B;A3CyyNhC;;A2CxyNI;E1CkFA,e0CjFqD;A3C2yNzD;;A2C/yNI;EACE,UAAU;EACV,eAA0B;A3CkzNhC;;A2CjzNI;E1CkFA,qB0CjFqD;A3CozNzD;;A2CxzNI;EACE,UAAU;EACV,gBAA0B;A3C2zNhC;;A2C1zNI;E1CkFA,sB0CjFqD;A3C6zNzD;;A2Cj0NI;EACE,UAAU;EACV,UAA0B;A3Co0NhC;;A2Cn0NI;E1CkFA,gB0CjFqD;A3Cs0NzD;;A2C10NI;EACE,UAAU;EACV,gBAA0B;A3C60NhC;;A2C50NI;E1CkFA,sB0CjFqD;A3C+0NzD;;A2Cn1NI;EACE,UAAU;EACV,gBAA0B;A3Cs1NhC;;A2Cr1NI;E1CkFA,sB0CjFqD;A3Cw1N
zD;;A2C51NI;EACE,UAAU;EACV,UAA0B;A3C+1NhC;;A2C91NI;E1CkFA,gB0CjFqD;A3Ci2NzD;;A2Cr2NI;EACE,UAAU;EACV,gBAA0B;A3Cw2NhC;;A2Cv2NI;E1CkFA,sB0CjFqD;A3C02NzD;;A2C92NI;EACE,UAAU;EACV,gBAA0B;A3Ci3NhC;;A2Ch3NI;E1CkFA,sB0CjFqD;A3Cm3NzD;;A2Cv3NI;EACE,UAAU;EACV,UAA0B;A3C03NhC;;A2Cz3NI;E1CkFA,gB0CjFqD;A3C43NzD;;A2Ch4NI;EACE,UAAU;EACV,gBAA0B;A3Cm4NhC;;A2Cl4NI;E1CkFA,sB0CjFqD;A3Cq4NzD;;A2Cz4NI;EACE,UAAU;EACV,gBAA0B;A3C44NhC;;A2C34NI;E1CkFA,sB0CjFqD;A3C84NzD;;A2Cl5NI;EACE,UAAU;EACV,WAA0B;A3Cq5NhC;;A2Cp5NI;E1CkFA,iB0CjFqD;A3Cu5NzD;;ACr4NE;E0C/EF;IAgEM,UAAU;E3Cy5Nd;E2Cz9NF;IAkEM,UAAU;IACV,WAAW;E3C05Nf;E2C79NF;IAqEM,UAAU;IACV,UAAU;E3C25Nd;E2Cj+NF;IAwEM,UAAU;IACV,eAAe;E3C45NnB;E2Cr+NF;IA2EM,UAAU;IACV,UAAU;E3C65Nd;E2Cz+NF;IA8EM,UAAU;IACV,eAAe;E3C85NnB;E2C7+NF;IAiFM,UAAU;IACV,UAAU;E3C+5Nd;E2Cj/NF;IAoFM,UAAU;IACV,UAAU;E3Cg6Nd;E2Cr/NF;IAuFM,UAAU;IACV,UAAU;E3Ci6Nd;E2Cz/NF;IA0FM,UAAU;IACV,UAAU;E3Ck6Nd;E2C7/NF;IA6FM,UAAU;IACV,UAAU;E3Cm6Nd;E2CjgOF;I1C8II,gB0C9CqC;E3Co6NvC;E2CpgOF;I1C8II,qB0C5C0C;E3Cq6N5C;E2CvgOF;I1C8II,gB0C1CqC;E3Cs6NvC;E2C1gOF;I1C8II,qB0CxC0C;E3Cu6N5C;E2C7gOF;I1C8II,gB0CtCqC;E3Cw6NvC;E2ChhOF;I1C8II,gB0CpCqC;E3Cy6NvC;E2CnhOF;I1C8II,gB0ClCqC;E3C06NvC;E2CthOF;I1C8II,gB0ChCqC;E3C26NvC;E2CzhOF;I1C8II,gB0C9BqC;E3C46NvC;E2C5hOF;IAmHQ,UAAU;IACV,SAA0B;E3C46NhC;E2ChiOF;I1C8II,e0CxBuD;E3C66NzD;E2CniOF;IAmHQ,UAAU;IACV,eAA0B;E3Cm7NhC;E2CviOF;I1C8II,qB0CxBuD;E3Co7NzD;E2C1iOF;IAmHQ,UAAU;IACV,gBAA0B;E3C07NhC;E2C9iOF;I1C8II,sB0CxBuD;E3C27NzD;E2CjjOF;IAmHQ,UAAU;IACV,UAA0B;E3Ci8NhC;E2CrjOF;I1C8II,gB0CxBuD;E3Ck8NzD;E2CxjOF;IAmHQ,UAAU;IACV,gBAA0B;E3Cw8NhC;E2C5jOF;I1C8II,sB0CxBuD;E3Cy8NzD;E2C/jOF;IAmHQ,UAAU;IACV,gBAA0B;E3C+8NhC;E2CnkOF;I1C8II,sB0CxBuD;E3Cg9NzD;E2CtkOF;IAmHQ,UAAU;IACV,UAA0B;E3Cs9NhC;E2C1kOF;I1C8II,gB0CxBuD;E3Cu9NzD;E2C7kOF;IAmHQ,UAAU;IACV,gBAA0B;E3C69NhC;E2CjlOF;I1C8II,sB0CxBuD;E3C89NzD;E2CplOF;IAmHQ,UAAU;IACV,gBAA0B;E3Co+NhC;E2CxlOF;I1C8II,sB0CxBuD;E3Cq+NzD;E2C3lOF;IAmHQ,UAAU;IACV,UAA0B;E3C2+NhC;E2C/lOF;I1C8II,gB0CxBuD;E3C4+NzD;E2ClmOF;IAmHQ,UAAU;IACV,gBAA0B;E3Ck/NhC;E2CtmOF;I1C8II,
sB0CxBuD;E3Cm/NzD;E2CzmOF;IAmHQ,UAAU;IACV,gBAA0B;E3Cy/NhC;E2C7mOF;I1C8II,sB0CxBuD;E3C0/NzD;E2ChnOF;IAmHQ,UAAU;IACV,WAA0B;E3CggOhC;E2CpnOF;I1C8II,iB0CxBuD;E3CigOzD;AACF;;ACriOE;E0CnFF;IA0HM,UAAU;E3CmgOd;E2C7nOF;IA6HM,UAAU;IACV,WAAW;E3CmgOf;E2CjoOF;IAiIM,UAAU;IACV,UAAU;E3CmgOd;E2CroOF;IAqIM,UAAU;IACV,eAAe;E3CmgOnB;E2CzoOF;IAyIM,UAAU;IACV,UAAU;E3CmgOd;E2C7oOF;IA6IM,UAAU;IACV,eAAe;E3CmgOnB;E2CjpOF;IAiJM,UAAU;IACV,UAAU;E3CmgOd;E2CrpOF;IAqJM,UAAU;IACV,UAAU;E3CmgOd;E2CzpOF;IAyJM,UAAU;IACV,UAAU;E3CmgOd;E2C7pOF;IA6JM,UAAU;IACV,UAAU;E3CmgOd;E2CjqOF;IAiKM,UAAU;IACV,UAAU;E3CmgOd;E2CrqOF;I1C8II,gB0CuBqC;E3CmgOvC;E2CxqOF;I1C8II,qB0C0B0C;E3CmgO5C;E2C3qOF;I1C8II,gB0C6BqC;E3CmgOvC;E2C9qOF;I1C8II,qB0CgC0C;E3CmgO5C;E2CjrOF;I1C8II,gB0CmCqC;E3CmgOvC;E2CprOF;I1C8II,gB0CsCqC;E3CmgOvC;E2CvrOF;I1C8II,gB0CyCqC;E3CmgOvC;E2C1rOF;I1C8II,gB0C4CqC;E3CmgOvC;E2C7rOF;I1C8II,gB0C+CqC;E3CmgOvC;E2ChsOF;IAiMQ,UAAU;IACV,SAA0B;E3CkgOhC;E2CpsOF;I1C8II,e0CuDuD;E3CkgOzD;E2CvsOF;IAiMQ,UAAU;IACV,eAA0B;E3CygOhC;E2C3sOF;I1C8II,qB0CuDuD;E3CygOzD;E2C9sOF;IAiMQ,UAAU;IACV,gBAA0B;E3CghOhC;E2CltOF;I1C8II,sB0CuDuD;E3CghOzD;E2CrtOF;IAiMQ,UAAU;IACV,UAA0B;E3CuhOhC;E2CztOF;I1C8II,gB0CuDuD;E3CuhOzD;E2C5tOF;IAiMQ,UAAU;IACV,gBAA0B;E3C8hOhC;E2ChuOF;I1C8II,sB0CuDuD;E3C8hOzD;E2CnuOF;IAiMQ,UAAU;IACV,gBAA0B;E3CqiOhC;E2CvuOF;I1C8II,sB0CuDuD;E3CqiOzD;E2C1uOF;IAiMQ,UAAU;IACV,UAA0B;E3C4iOhC;E2C9uOF;I1C8II,gB0CuDuD;E3C4iOzD;E2CjvOF;IAiMQ,UAAU;IACV,gBAA0B;E3CmjOhC;E2CrvOF;I1C8II,sB0CuDuD;E3CmjOzD;E2CxvOF;IAiMQ,UAAU;IACV,gBAA0B;E3C0jOhC;E2C5vOF;I1C8II,sB0CuDuD;E3C0jOzD;E2C/vOF;IAiMQ,UAAU;IACV,UAA0B;E3CikOhC;E2CnwOF;I1C8II,gB0CuDuD;E3CikOzD;E2CtwOF;IAiMQ,UAAU;IACV,gBAA0B;E3CwkOhC;E2C1wOF;I1C8II,sB0CuDuD;E3CwkOzD;E2C7wOF;IAiMQ,UAAU;IACV,gBAA0B;E3C+kOhC;E2CjxOF;I1C8II,sB0CuDuD;E3C+kOzD;E2CpxOF;IAiMQ,UAAU;IACV,WAA0B;E3CslOhC;E2CxxOF;I1C8II,iB0CuDuD;E3CslOzD;AACF;;ACjsOE;E0C3FF;IAwMM,UAAU;E3CylOd;E2CjyOF;IA0MM,UAAU;IACV,WAAW;E3C0lOf;E2CryOF;IA6MM,UAAU;IACV,UAAU;E3C2lOd;E2CzyOF;IAgNM,UAAU;IACV,eAAe;E3C4lOnB;E2C7yOF;IAmNM,UAAU;IACV,UAAU;E3C6lOd
;E2CjzOF;IAsNM,UAAU;IACV,eAAe;E3C8lOnB;E2CrzOF;IAyNM,UAAU;IACV,UAAU;E3C+lOd;E2CzzOF;IA4NM,UAAU;IACV,UAAU;E3CgmOd;E2C7zOF;IA+NM,UAAU;IACV,UAAU;E3CimOd;E2Cj0OF;IAkOM,UAAU;IACV,UAAU;E3CkmOd;E2Cr0OF;IAqOM,UAAU;IACV,UAAU;E3CmmOd;E2Cz0OF;I1C8II,gB0C0FqC;E3ComOvC;E2C50OF;I1C8II,qB0C4F0C;E3CqmO5C;E2C/0OF;I1C8II,gB0C8FqC;E3CsmOvC;E2Cl1OF;I1C8II,qB0CgG0C;E3CumO5C;E2Cr1OF;I1C8II,gB0CkGqC;E3CwmOvC;E2Cx1OF;I1C8II,gB0CoGqC;E3CymOvC;E2C31OF;I1C8II,gB0CsGqC;E3C0mOvC;E2C91OF;I1C8II,gB0CwGqC;E3C2mOvC;E2Cj2OF;I1C8II,gB0C0GqC;E3C4mOvC;E2Cp2OF;IA2PQ,UAAU;IACV,SAA0B;E3C4mOhC;E2Cx2OF;I1C8II,e0CgHuD;E3C6mOzD;E2C32OF;IA2PQ,UAAU;IACV,eAA0B;E3CmnOhC;E2C/2OF;I1C8II,qB0CgHuD;E3ConOzD;E2Cl3OF;IA2PQ,UAAU;IACV,gBAA0B;E3C0nOhC;E2Ct3OF;I1C8II,sB0CgHuD;E3C2nOzD;E2Cz3OF;IA2PQ,UAAU;IACV,UAA0B;E3CioOhC;E2C73OF;I1C8II,gB0CgHuD;E3CkoOzD;E2Ch4OF;IA2PQ,UAAU;IACV,gBAA0B;E3CwoOhC;E2Cp4OF;I1C8II,sB0CgHuD;E3CyoOzD;E2Cv4OF;IA2PQ,UAAU;IACV,gBAA0B;E3C+oOhC;E2C34OF;I1C8II,sB0CgHuD;E3CgpOzD;E2C94OF;IA2PQ,UAAU;IACV,UAA0B;E3CspOhC;E2Cl5OF;I1C8II,gB0CgHuD;E3CupOzD;E2Cr5OF;IA2PQ,UAAU;IACV,gBAA0B;E3C6pOhC;E2Cz5OF;I1C8II,sB0CgHuD;E3C8pOzD;E2C55OF;IA2PQ,UAAU;IACV,gBAA0B;E3CoqOhC;E2Ch6OF;I1C8II,sB0CgHuD;E3CqqOzD;E2Cn6OF;IA2PQ,UAAU;IACV,UAA0B;E3C2qOhC;E2Cv6OF;I1C8II,gB0CgHuD;E3C4qOzD;E2C16OF;IA2PQ,UAAU;IACV,gBAA0B;E3CkrOhC;E2C96OF;I1C8II,sB0CgHuD;E3CmrOzD;E2Cj7OF;IA2PQ,UAAU;IACV,gBAA0B;E3CyrOhC;E2Cr7OF;I1C8II,sB0CgHuD;E3C0rOzD;E2Cx7OF;IA2PQ,UAAU;IACV,WAA0B;E3CgsOhC;E2C57OF;I1C8II,iB0CgHuD;E3CisOzD;AACF;;ACj2OE;E0C/FF;IAiQM,UAAU;E3CosOd;E2Cr8OF;IAmQM,UAAU;IACV,WAAW;E3CqsOf;E2Cz8OF;IAsQM,UAAU;IACV,UAAU;E3CssOd;E2C78OF;IAyQM,UAAU;IACV,eAAe;E3CusOnB;E2Cj9OF;IA4QM,UAAU;IACV,UAAU;E3CwsOd;E2Cr9OF;IA+QM,UAAU;IACV,eAAe;E3CysOnB;E2Cz9OF;IAkRM,UAAU;IACV,UAAU;E3C0sOd;E2C79OF;IAqRM,UAAU;IACV,UAAU;E3C2sOd;E2Cj+OF;IAwRM,UAAU;IACV,UAAU;E3C4sOd;E2Cr+OF;IA2RM,UAAU;IACV,UAAU;E3C6sOd;E2Cz+OF;IA8RM,UAAU;IACV,UAAU;E3C8sOd;E2C7+OF;I1C8II,gB0CmJqC;E3C+sOvC;E2Ch/OF;I1C8II,qB0CqJ0C;E3CgtO5C;E2Cn/OF;I1C8II,gB0CuJqC;E3CitOvC;E2Ct/OF;I1C8II,qB0CyJ0C;E3C
ktO5C;E2Cz/OF;I1C8II,gB0C2JqC;E3CmtOvC;E2C5/OF;I1C8II,gB0C6JqC;E3CotOvC;E2C//OF;I1C8II,gB0C+JqC;E3CqtOvC;E2ClgPF;I1C8II,gB0CiKqC;E3CstOvC;E2CrgPF;I1C8II,gB0CmKqC;E3CutOvC;E2CxgPF;IAoTQ,UAAU;IACV,SAA0B;E3CutOhC;E2C5gPF;I1C8II,e0CyKuD;E3CwtOzD;E2C/gPF;IAoTQ,UAAU;IACV,eAA0B;E3C8tOhC;E2CnhPF;I1C8II,qB0CyKuD;E3C+tOzD;E2CthPF;IAoTQ,UAAU;IACV,gBAA0B;E3CquOhC;E2C1hPF;I1C8II,sB0CyKuD;E3CsuOzD;E2C7hPF;IAoTQ,UAAU;IACV,UAA0B;E3C4uOhC;E2CjiPF;I1C8II,gB0CyKuD;E3C6uOzD;E2CpiPF;IAoTQ,UAAU;IACV,gBAA0B;E3CmvOhC;E2CxiPF;I1C8II,sB0CyKuD;E3CovOzD;E2C3iPF;IAoTQ,UAAU;IACV,gBAA0B;E3C0vOhC;E2C/iPF;I1C8II,sB0CyKuD;E3C2vOzD;E2CljPF;IAoTQ,UAAU;IACV,UAA0B;E3CiwOhC;E2CtjPF;I1C8II,gB0CyKuD;E3CkwOzD;E2CzjPF;IAoTQ,UAAU;IACV,gBAA0B;E3CwwOhC;E2C7jPF;I1C8II,sB0CyKuD;E3CywOzD;E2ChkPF;IAoTQ,UAAU;IACV,gBAA0B;E3C+wOhC;E2CpkPF;I1C8II,sB0CyKuD;E3CgxOzD;E2CvkPF;IAoTQ,UAAU;IACV,UAA0B;E3CsxOhC;E2C3kPF;I1C8II,gB0CyKuD;E3CuxOzD;E2C9kPF;IAoTQ,UAAU;IACV,gBAA0B;E3C6xOhC;E2CllPF;I1C8II,sB0CyKuD;E3C8xOzD;E2CrlPF;IAoTQ,UAAU;IACV,gBAA0B;E3CoyOhC;E2CzlPF;I1C8II,sB0CyKuD;E3CqyOzD;E2C5lPF;IAoTQ,UAAU;IACV,WAA0B;E3C2yOhC;E2ChmPF;I1C8II,iB0CyKuD;E3C4yOzD;AACF;;ACt/OI;E0C9GJ;IA0TM,UAAU;E3C+yOd;E2CzmPF;IA4TM,UAAU;IACV,WAAW;E3CgzOf;E2C7mPF;IA+TM,UAAU;IACV,UAAU;E3CizOd;E2CjnPF;IAkUM,UAAU;IACV,eAAe;E3CkzOnB;E2CrnPF;IAqUM,UAAU;IACV,UAAU;E3CmzOd;E2CznPF;IAwUM,UAAU;IACV,eAAe;E3CozOnB;E2C7nPF;IA2UM,UAAU;IACV,UAAU;E3CqzOd;E2CjoPF;IA8UM,UAAU;IACV,UAAU;E3CszOd;E2CroPF;IAiVM,UAAU;IACV,UAAU;E3CuzOd;E2CzoPF;IAoVM,UAAU;IACV,UAAU;E3CwzOd;E2C7oPF;IAuVM,UAAU;IACV,UAAU;E3CyzOd;E2CjpPF;I1C8II,gB0C4MqC;E3C0zOvC;E2CppPF;I1C8II,qB0C8M0C;E3C2zO5C;E2CvpPF;I1C8II,gB0CgNqC;E3C4zOvC;E2C1pPF;I1C8II,qB0CkN0C;E3C6zO5C;E2C7pPF;I1C8II,gB0CoNqC;E3C8zOvC;E2ChqPF;I1C8II,gB0CsNqC;E3C+zOvC;E2CnqPF;I1C8II,gB0CwNqC;E3Cg0OvC;E2CtqPF;I1C8II,gB0C0NqC;E3Ci0OvC;E2CzqPF;I1C8II,gB0C4NqC;E3Ck0OvC;E2C5qPF;IA6WQ,UAAU;IACV,SAA0B;E3Ck0OhC;E2ChrPF;I1C8II,e0CkOuD;E3Cm0OzD;E2CnrPF;IA6WQ,UAAU;IACV,eAA0B;E3Cy0OhC;E2CvrPF;I1C8II,qB0CkOuD;E3C00OzD;E2C1rPF;IA6WQ,UAAU;IACV,gBAA0B;E3Cg1
OhC;E2C9rPF;I1C8II,sB0CkOuD;E3Ci1OzD;E2CjsPF;IA6WQ,UAAU;IACV,UAA0B;E3Cu1OhC;E2CrsPF;I1C8II,gB0CkOuD;E3Cw1OzD;E2CxsPF;IA6WQ,UAAU;IACV,gBAA0B;E3C81OhC;E2C5sPF;I1C8II,sB0CkOuD;E3C+1OzD;E2C/sPF;IA6WQ,UAAU;IACV,gBAA0B;E3Cq2OhC;E2CntPF;I1C8II,sB0CkOuD;E3Cs2OzD;E2CttPF;IA6WQ,UAAU;IACV,UAA0B;E3C42OhC;E2C1tPF;I1C8II,gB0CkOuD;E3C62OzD;E2C7tPF;IA6WQ,UAAU;IACV,gBAA0B;E3Cm3OhC;E2CjuPF;I1C8II,sB0CkOuD;E3Co3OzD;E2CpuPF;IA6WQ,UAAU;IACV,gBAA0B;E3C03OhC;E2CxuPF;I1C8II,sB0CkOuD;E3C23OzD;E2C3uPF;IA6WQ,UAAU;IACV,UAA0B;E3Ci4OhC;E2C/uPF;I1C8II,gB0CkOuD;E3Ck4OzD;E2ClvPF;IA6WQ,UAAU;IACV,gBAA0B;E3Cw4OhC;E2CtvPF;I1C8II,sB0CkOuD;E3Cy4OzD;E2CzvPF;IA6WQ,UAAU;IACV,gBAA0B;E3C+4OhC;E2C7vPF;I1C8II,sB0CkOuD;E3Cg5OzD;E2ChwPF;IA6WQ,UAAU;IACV,WAA0B;E3Cs5OhC;E2CpwPF;I1C8II,iB0CkOuD;E3Cu5OzD;AACF;;AC3oPI;E0C7HJ;IAmXM,UAAU;E3C05Od;E2C7wPF;IAqXM,UAAU;IACV,WAAW;E3C25Of;E2CjxPF;IAwXM,UAAU;IACV,UAAU;E3C45Od;E2CrxPF;IA2XM,UAAU;IACV,eAAe;E3C65OnB;E2CzxPF;IA8XM,UAAU;IACV,UAAU;E3C85Od;E2C7xPF;IAiYM,UAAU;IACV,eAAe;E3C+5OnB;E2CjyPF;IAoYM,UAAU;IACV,UAAU;E3Cg6Od;E2CryPF;IAuYM,UAAU;IACV,UAAU;E3Ci6Od;E2CzyPF;IA0YM,UAAU;IACV,UAAU;E3Ck6Od;E2C7yPF;IA6YM,UAAU;IACV,UAAU;E3Cm6Od;E2CjzPF;IAgZM,UAAU;IACV,UAAU;E3Co6Od;E2CrzPF;I1C8II,gB0CqQqC;E3Cq6OvC;E2CxzPF;I1C8II,qB0CuQ0C;E3Cs6O5C;E2C3zPF;I1C8II,gB0CyQqC;E3Cu6OvC;E2C9zPF;I1C8II,qB0C2Q0C;E3Cw6O5C;E2Cj0PF;I1C8II,gB0C6QqC;E3Cy6OvC;E2Cp0PF;I1C8II,gB0C+QqC;E3C06OvC;E2Cv0PF;I1C8II,gB0CiRqC;E3C26OvC;E2C10PF;I1C8II,gB0CmRqC;E3C46OvC;E2C70PF;I1C8II,gB0CqRqC;E3C66OvC;E2Ch1PF;IAsaQ,UAAU;IACV,SAA0B;E3C66OhC;E2Cp1PF;I1C8II,e0C2RuD;E3C86OzD;E2Cv1PF;IAsaQ,UAAU;IACV,eAA0B;E3Co7OhC;E2C31PF;I1C8II,qB0C2RuD;E3Cq7OzD;E2C91PF;IAsaQ,UAAU;IACV,gBAA0B;E3C27OhC;E2Cl2PF;I1C8II,sB0C2RuD;E3C47OzD;E2Cr2PF;IAsaQ,UAAU;IACV,UAA0B;E3Ck8OhC;E2Cz2PF;I1C8II,gB0C2RuD;E3Cm8OzD;E2C52PF;IAsaQ,UAAU;IACV,gBAA0B;E3Cy8OhC;E2Ch3PF;I1C8II,sB0C2RuD;E3C08OzD;E2Cn3PF;IAsaQ,UAAU;IACV,gBAA0B;E3Cg9OhC;E2Cv3PF;I1C8II,sB0C2RuD;E3Ci9OzD;E2C13PF;IAsaQ,UAAU;IACV,UAA0B;E3Cu9OhC;E2C93PF;I1C8II,gB0C2RuD;E3Cw9OzD;E2Cj4PF;IAsaQ,UAAU;IACV
,gBAA0B;E3C89OhC;E2Cr4PF;I1C8II,sB0C2RuD;E3C+9OzD;E2Cx4PF;IAsaQ,UAAU;IACV,gBAA0B;E3Cq+OhC;E2C54PF;I1C8II,sB0C2RuD;E3Cs+OzD;E2C/4PF;IAsaQ,UAAU;IACV,UAA0B;E3C4+OhC;E2Cn5PF;I1C8II,gB0C2RuD;E3C6+OzD;E2Ct5PF;IAsaQ,UAAU;IACV,gBAA0B;E3Cm/OhC;E2C15PF;I1C8II,sB0C2RuD;E3Co/OzD;E2C75PF;IAsaQ,UAAU;IACV,gBAA0B;E3C0/OhC;E2Cj6PF;I1C8II,sB0C2RuD;E3C2/OzD;E2Cp6PF;IAsaQ,UAAU;IACV,WAA0B;E3CigPhC;E2Cx6PF;I1C8II,iB0C2RuD;E3CkgPzD;AACF;;A2CjgPA;E1C7RI,qB0ChJgB;E1CgJhB,sB0ChJgB;EAgblB,oBAhbkB;A3Co7PpB;;A2CvgPA;EAKI,uBAlbgB;A3Cw7PpB;;A2C3gPA;EAOI,qCAA4C;A3CwgPhD;;A2C/gPA;EAUI,uBAAuB;A3CygP3B;;A2CnhPA;E1C7RI,c0CySiC;E1CzSjC,e0C0SiC;EACjC,aAAa;A3C2gPjB;;A2CzhPA;EAgBM,SAAS;EACT,qBAAqB;A3C6gP3B;;A2C9hPA;EAmBM,qBAAqB;A3C+gP3B;;A2CliPA;EAqBM,gBAAgB;A3CihPtB;;A2CtiPA;EAuBI,aAAa;A3CmhPjB;;A2C1iPA;EAyBI,eAAe;A3CqhPnB;;A2C9iPA;EA2BI,mBAAmB;A3CuhPvB;;AC14PE;E0CwVF;IA+BM,aAAa;E3CwhPjB;AACF;;ACp4PE;E0C4UF;IAmCM,aAAa;E3C0hPjB;AACF;;A2CxhPE;EACE,oBAAY;E1CpUZ,wC0CqU2D;E1CrU3D,yC0CsU2D;A3C2hP/D;;A2C9hPE;EAKI,8BAA8B;EAC9B,+BAA+B;A3C6hPrC;;A2CniPE;EASM,iBAAY;A3C8hPpB;;ACz6PE;E0CkYA;IAYQ,iBAAY;E3CgiPpB;AACF;;AC36PE;E0C8XA;IAeQ,iBAAY;E3CmiPpB;AACF;;AC76PE;E0C0XA;IAkBQ,iBAAY;E3CsiPpB;AACF;;AC/6PE;E0CsXA;IAqBQ,iBAAY;E3CyiPpB;AACF;;ACj7PE;E0CkXA;IAwBQ,iBAAY;E3C4iPpB;AACF;;ACl7PI;E0C6WF;IA2BQ,iBAAY;E3C+iPpB;AACF;;AC96PI;E0CmWF;IA8BQ,iBAAY;E3CkjPpB;AACF;;AC/6PI;E0C8VF;IAiCQ,iBAAY;E3CqjPpB;AACF;;AC36PI;E0CoVF;IAoCQ,iBAAY;E3CwjPpB;AACF;;A2C7lPE;EASM,oBAAY;A3CwlPpB;;ACn+PE;E0CkYA;IAYQ,oBAAY;E3C0lPpB;AACF;;ACr+PE;E0C8XA;IAeQ,oBAAY;E3C6lPpB;AACF;;ACv+PE;E0C0XA;IAkBQ,oBAAY;E3CgmPpB;AACF;;ACz+PE;E0CsXA;IAqBQ,oBAAY;E3CmmPpB;AACF;;AC3+PE;E0CkXA;IAwBQ,oBAAY;E3CsmPpB;AACF;;AC5+PI;E0C6WF;IA2BQ,oBAAY;E3CymPpB;AACF;;ACx+PI;E0CmWF;IA8BQ,oBAAY;E3C4mPpB;AACF;;ACz+PI;E0C8VF;IAiCQ,oBAAY;E3C+mPpB;AACF;;ACr+PI;E0CoVF;IAoCQ,oBAAY;E3CknPpB;AACF;;A2CvpPE;EASM,mBAAY;A3CkpPpB;;AC7hQE;E0CkYA;IAYQ,mBAAY;E3CopPpB;AACF;;AC/hQE;E0C8XA;IAeQ,mBAAY;E3CupPpB;AACF;;ACjiQE;E0C0XA;IAkBQ,mBAAY;E3C0pPpB;AACF;;ACniQE;E0CsXA;IAqBQ,mBAAY;E3C6pPpB;AACF;;ACri
QE;E0CkXA;IAwBQ,mBAAY;E3CgqPpB;AACF;;ACtiQI;E0C6WF;IA2BQ,mBAAY;E3CmqPpB;AACF;;ACliQI;E0CmWF;IA8BQ,mBAAY;E3CsqPpB;AACF;;ACniQI;E0C8VF;IAiCQ,mBAAY;E3CyqPpB;AACF;;AC/hQI;E0CoVF;IAoCQ,mBAAY;E3C4qPpB;AACF;;A2CjtPE;EASM,oBAAY;A3C4sPpB;;ACvlQE;E0CkYA;IAYQ,oBAAY;E3C8sPpB;AACF;;ACzlQE;E0C8XA;IAeQ,oBAAY;E3CitPpB;AACF;;AC3lQE;E0C0XA;IAkBQ,oBAAY;E3CotPpB;AACF;;AC7lQE;E0CsXA;IAqBQ,oBAAY;E3CutPpB;AACF;;AC/lQE;E0CkXA;IAwBQ,oBAAY;E3C0tPpB;AACF;;AChmQI;E0C6WF;IA2BQ,oBAAY;E3C6tPpB;AACF;;AC5lQI;E0CmWF;IA8BQ,oBAAY;E3CguPpB;AACF;;AC7lQI;E0C8VF;IAiCQ,oBAAY;E3CmuPpB;AACF;;ACzlQI;E0CoVF;IAoCQ,oBAAY;E3CsuPpB;AACF;;A2C3wPE;EASM,iBAAY;A3CswPpB;;ACjpQE;E0CkYA;IAYQ,iBAAY;E3CwwPpB;AACF;;ACnpQE;E0C8XA;IAeQ,iBAAY;E3C2wPpB;AACF;;ACrpQE;E0C0XA;IAkBQ,iBAAY;E3C8wPpB;AACF;;ACvpQE;E0CsXA;IAqBQ,iBAAY;E3CixPpB;AACF;;ACzpQE;E0CkXA;IAwBQ,iBAAY;E3CoxPpB;AACF;;AC1pQI;E0C6WF;IA2BQ,iBAAY;E3CuxPpB;AACF;;ACtpQI;E0CmWF;IA8BQ,iBAAY;E3C0xPpB;AACF;;ACvpQI;E0C8VF;IAiCQ,iBAAY;E3C6xPpB;AACF;;ACnpQI;E0CoVF;IAoCQ,iBAAY;E3CgyPpB;AACF;;A2Cr0PE;EASM,oBAAY;A3Cg0PpB;;AC3sQE;E0CkYA;IAYQ,oBAAY;E3Ck0PpB;AACF;;AC7sQE;E0C8XA;IAeQ,oBAAY;E3Cq0PpB;AACF;;AC/sQE;E0C0XA;IAkBQ,oBAAY;E3Cw0PpB;AACF;;ACjtQE;E0CsXA;IAqBQ,oBAAY;E3C20PpB;AACF;;ACntQE;E0CkXA;IAwBQ,oBAAY;E3C80PpB;AACF;;ACptQI;E0C6WF;IA2BQ,oBAAY;E3Ci1PpB;AACF;;AChtQI;E0CmWF;IA8BQ,oBAAY;E3Co1PpB;AACF;;ACjtQI;E0C8VF;IAiCQ,oBAAY;E3Cu1PpB;AACF;;AC7sQI;E0CoVF;IAoCQ,oBAAY;E3C01PpB;AACF;;A2C/3PE;EASM,mBAAY;A3C03PpB;;ACrwQE;E0CkYA;IAYQ,mBAAY;E3C43PpB;AACF;;ACvwQE;E0C8XA;IAeQ,mBAAY;E3C+3PpB;AACF;;ACzwQE;E0C0XA;IAkBQ,mBAAY;E3Ck4PpB;AACF;;AC3wQE;E0CsXA;IAqBQ,mBAAY;E3Cq4PpB;AACF;;AC7wQE;E0CkXA;IAwBQ,mBAAY;E3Cw4PpB;AACF;;AC9wQI;E0C6WF;IA2BQ,mBAAY;E3C24PpB;AACF;;AC1wQI;E0CmWF;IA8BQ,mBAAY;E3C84PpB;AACF;;AC3wQI;E0C8VF;IAiCQ,mBAAY;E3Ci5PpB;AACF;;ACvwQI;E0CoVF;IAoCQ,mBAAY;E3Co5PpB;AACF;;A2Cz7PE;EASM,oBAAY;A3Co7PpB;;AC/zQE;E0CkYA;IAYQ,oBAAY;E3Cs7PpB;AACF;;ACj0QE;E0C8XA;IAeQ,oBAAY;E3Cy7PpB;AACF;;ACn0QE;E0C0XA;IAkBQ,oBAAY;E3C47PpB;AACF;;ACr0QE;E0CsXA;IAqBQ,oBAAY;E3C+7PpB;AACF;;ACv0QE;E0CkXA;IAwBQ,oBAA
Y;E3Ck8PpB;AACF;;ACx0QI;E0C6WF;IA2BQ,oBAAY;E3Cq8PpB;AACF;;ACp0QI;E0CmWF;IA8BQ,oBAAY;E3Cw8PpB;AACF;;ACr0QI;E0C8VF;IAiCQ,oBAAY;E3C28PpB;AACF;;ACj0QI;E0CoVF;IAoCQ,oBAAY;E3C88PpB;AACF;;A2Cn/PE;EASM,iBAAY;A3C8+PpB;;ACz3QE;E0CkYA;IAYQ,iBAAY;E3Cg/PpB;AACF;;AC33QE;E0C8XA;IAeQ,iBAAY;E3Cm/PpB;AACF;;AC73QE;E0C0XA;IAkBQ,iBAAY;E3Cs/PpB;AACF;;AC/3QE;E0CsXA;IAqBQ,iBAAY;E3Cy/PpB;AACF;;ACj4QE;E0CkXA;IAwBQ,iBAAY;E3C4/PpB;AACF;;ACl4QI;E0C6WF;IA2BQ,iBAAY;E3C+/PpB;AACF;;AC93QI;E0CmWF;IA8BQ,iBAAY;E3CkgQpB;AACF;;AC/3QI;E0C8VF;IAiCQ,iBAAY;E3CqgQpB;AACF;;AC33QI;E0CoVF;IAoCQ,iBAAY;E3CwgQpB;AACF;;A4C9/QA;EACE,oBAAoB;EACpB,cAAc;EACd,aAAa;EACb,YAAY;EACZ,cAAc;EACd,+BAAuB;EAAvB,4BAAuB;EAAvB,uBAAuB;A5CigRzB;;A4CvgRA;EASI,qBAA+B;EAC/B,sBAAgC;EAChC,oBAA8B;A5CkgRlC;;A4C7gRA;EAaM,uBAAiC;A5CogRvC;;A4CjhRA;EAeM,sBAjBgB;A5CuhRtB;;A4CrhRA;EAiBI,oBAAoB;A5CwgRxB;;A4CzhRA;EAmBI,gBArBkB;A5C+hRtB;;A4C7hRA;EAqBI,sBAAsB;A5C4gR1B;;A4CjiRA;EAuBM,gCAAgC;A5C8gRtC;;ACl9QE;E2CnFF;IA2BM,aAAa;E5C+gRjB;E4C1iRF;IA8BQ,UAAU;IACV,eAAuB;E5C+gR7B;E4C9iRF;IA8BQ,UAAU;IACV,gBAAuB;E5CmhR7B;E4CljRF;IA8BQ,UAAU;IACV,UAAuB;E5CuhR7B;E4CtjRF;IA8BQ,UAAU;IACV,gBAAuB;E5C2hR7B;E4C1jRF;IA8BQ,UAAU;IACV,gBAAuB;E5C+hR7B;E4C9jRF;IA8BQ,UAAU;IACV,UAAuB;E5CmiR7B;E4ClkRF;IA8BQ,UAAU;IACV,gBAAuB;E5CuiR7B;E4CtkRF;IA8BQ,UAAU;IACV,gBAAuB;E5C2iR7B;E4C1kRF;IA8BQ,UAAU;IACV,UAAuB;E5C+iR7B;E4C9kRF;IA8BQ,UAAU;IACV,gBAAuB;E5CmjR7B;E4CllRF;IA8BQ,UAAU;IACV,gBAAuB;E5CujR7B;E4CtlRF;IA8BQ,UAAU;IACV,WAAuB;E5C2jR7B;AACF;;A6C7lRA,kBAAA;ACEE;EACE,uBAAwB;A9C+lR5B;;A8C9lRE;EAGI,yBAA0C;A9C+lRhD;;A8C9lRE;EACE,kCAAmC;A9CimRvC;;A8CxmRE;EACE,yBAAwB;A9C2mR5B;;A8C1mRE;EAGI,uBAA0C;A9C2mRhD;;A8C1mRE;EACE,oCAAmC;A9C6mRvC;;A8CpnRE;EACE,4BAAwB;A9CunR5B;;A8CtnRE;EAGI,yBAA0C;A9CunRhD;;A8CtnRE;EACE,uCAAmC;A9CynRvC;;A8ChoRE;EACE,yBAAwB;A9CmoR5B;;A8CloRE;EAGI,yBAA0C;A9CmoRhD;;A8CloRE;EACE,oCAAmC;A9CqoRvC;;A8C5oRE;EACE,yBAAwB;A9C+oR5B;;A8C9oRE;EAGI,yBAA0C;A9C+oRhD;;A8C9oRE;EACE,oCAAmC;A9CipRvC;;A8C5oRI;EACE,yBAA8B;A9C+oRpC;;A8C9oRI;EAGI,yBAAgD;A9C+oRxD;;A8C9oRI;EACE,oCAAyC;A9CipR/C;;A8C/oR
I;EACE,yBAA6B;A9CkpRnC;;A8CjpRI;EAGI,yBAAgD;A9CkpRxD;;A8CjpRI;EACE,oCAAwC;A9CopR9C;;A8ChrRE;EACE,yBAAwB;A9CmrR5B;;A8ClrRE;EAGI,yBAA0C;A9CmrRhD;;A8ClrRE;EACE,oCAAmC;A9CqrRvC;;A8ChrRI;EACE,yBAA8B;A9CmrRpC;;A8ClrRI;EAGI,yBAAgD;A9CmrRxD;;A8ClrRI;EACE,oCAAyC;A9CqrR/C;;A8CnrRI;EACE,yBAA6B;A9CsrRnC;;A8CrrRI;EAGI,yBAAgD;A9CsrRxD;;A8CrrRI;EACE,oCAAwC;A9CwrR9C;;A8CptRE;EACE,yBAAwB;A9CutR5B;;A8CttRE;EAGI,yBAA0C;A9CutRhD;;A8CttRE;EACE,oCAAmC;A9CytRvC;;A8CptRI;EACE,yBAA8B;A9CutRpC;;A8CttRI;EAGI,yBAAgD;A9CutRxD;;A8CttRI;EACE,oCAAyC;A9CytR/C;;A8CvtRI;EACE,yBAA6B;A9C0tRnC;;A8CztRI;EAGI,yBAAgD;A9C0tRxD;;A8CztRI;EACE,oCAAwC;A9C4tR9C;;A8CxvRE;EACE,yBAAwB;A9C2vR5B;;A8C1vRE;EAGI,yBAA0C;A9C2vRhD;;A8C1vRE;EACE,oCAAmC;A9C6vRvC;;A8CxvRI;EACE,yBAA8B;A9C2vRpC;;A8C1vRI;EAGI,yBAAgD;A9C2vRxD;;A8C1vRI;EACE,oCAAyC;A9C6vR/C;;A8C3vRI;EACE,yBAA6B;A9C8vRnC;;A8C7vRI;EAGI,yBAAgD;A9C8vRxD;;A8C7vRI;EACE,oCAAwC;A9CgwR9C;;A8C5xRE;EACE,yBAAwB;A9C+xR5B;;A8C9xRE;EAGI,yBAA0C;A9C+xRhD;;A8C9xRE;EACE,oCAAmC;A9CiyRvC;;A8C5xRI;EACE,yBAA8B;A9C+xRpC;;A8C9xRI;EAGI,yBAAgD;A9C+xRxD;;A8C9xRI;EACE,oCAAyC;A9CiyR/C;;A8C/xRI;EACE,yBAA6B;A9CkyRnC;;A8CjyRI;EAGI,yBAAgD;A9CkyRxD;;A8CjyRI;EACE,oCAAwC;A9CoyR9C;;A8Ch0RE;EACE,yBAAwB;A9Cm0R5B;;A8Cl0RE;EAGI,yBAA0C;A9Cm0RhD;;A8Cl0RE;EACE,oCAAmC;A9Cq0RvC;;A8Ch0RI;EACE,yBAA8B;A9Cm0RpC;;A8Cl0RI;EAGI,yBAAgD;A9Cm0RxD;;A8Cl0RI;EACE,oCAAyC;A9Cq0R/C;;A8Cn0RI;EACE,yBAA6B;A9Cs0RnC;;A8Cr0RI;EAGI,yBAAgD;A9Cs0RxD;;A8Cr0RI;EACE,oCAAwC;A9Cw0R9C;;A8Cr0RE;EACE,yBAAwB;A9Cw0R5B;;A8Cv0RE;EACE,oCAAmC;A9C00RvC;;A8C70RE;EACE,yBAAwB;A9Cg1R5B;;A8C/0RE;EACE,oCAAmC;A9Ck1RvC;;A8Cr1RE;EACE,yBAAwB;A9Cw1R5B;;A8Cv1RE;EACE,oCAAmC;A9C01RvC;;A8C71RE;EACE,yBAAwB;A9Cg2R5B;;A8C/1RE;EACE,oCAAmC;A9Ck2RvC;;A8Cr2RE;EACE,yBAAwB;A9Cw2R5B;;A8Cv2RE;EACE,oCAAmC;A9C02RvC;;A8C72RE;EACE,yBAAwB;A9Cg3R5B;;A8C/2RE;EACE,oCAAmC;A9Ck3RvC;;A8Cr3RE;EACE,yBAAwB;A9Cw3R5B;;A8Cv3RE;EACE,oCAAmC;A9C03RvC;;A8C73RE;EACE,4BAAwB;A9Cg4R5B;;A8C/3RE;EACE,uCAAmC;A9Ck4RvC;;A8Cr4RE;EACE,yBAAwB;A9Cw4R5B;;A8Cv4RE;EACE,oCAAmC;A9C04RvC;;A+C56RE;EACE,8BAAiC;A/C+6R
rC;;A+Ch7RE;EACE,sCAAiC;A/Cm7RrC;;A+Cp7RE;EACE,iCAAiC;A/Cu7RrC;;A+Cx7RE;EACE,yCAAiC;A/C27RrC;;A+Cv7RE;EACE,4BAA4B;A/C07RhC;;A+C37RE;EACE,0BAA4B;A/C87RhC;;A+C/7RE;EACE,kCAA4B;A/Ck8RhC;;A+C97RE;EACE,sCAAkC;A/Ci8RtC;;A+Cl8RE;EACE,oCAAkC;A/Cq8RtC;;A+Ct8RE;EACE,kCAAkC;A/Cy8RtC;;A+C18RE;EACE,yCAAkC;A/C68RtC;;A+C98RE;EACE,wCAAkC;A/Ci9RtC;;A+Cl9RE;EACE,wCAAkC;A/Cq9RtC;;A+Ct9RE;EACE,iCAAkC;A/Cy9RtC;;A+C19RE;EACE,+BAAkC;A/C69RtC;;A+C99RE;EACE,gCAAkC;A/Ci+RtC;;A+Cl+RE;EACE,iCAAkC;A/Cq+RtC;;A+Cj+RE;EACE,oCAAgC;A/Co+RpC;;A+Cr+RE;EACE,kCAAgC;A/Cw+RpC;;A+Cz+RE;EACE,gCAAgC;A/C4+RpC;;A+C7+RE;EACE,uCAAgC;A/Cg/RpC;;A+Cj/RE;EACE,sCAAgC;A/Co/RpC;;A+Cr/RE;EACE,sCAAgC;A/Cw/RpC;;A+Cz/RE;EACE,iCAAgC;A/C4/RpC;;A+C7/RE;EACE,+BAAgC;A/CggSpC;;A+CjgSE;EACE,6BAAgC;A/CogSpC;;A+CrgSE;EACE,kCAAgC;A/CwgSpC;;A+CpgSE;EACE,+BAA8B;A/CugSlC;;A+CxgSE;EACE,kCAA8B;A/C2gSlC;;A+C5gSE;EACE,gCAA8B;A/C+gSlC;;A+ChhSE;EACE,8BAA8B;A/CmhSlC;;A+CphSE;EACE,gCAA8B;A/CuhSlC;;A+CxhSE;EACE,6BAA8B;A/C2hSlC;;A+C5hSE;EACE,2BAA8B;A/C+hSlC;;A+ChiSE;EACE,kCAA8B;A/CmiSlC;;A+CpiSE;EACE,gCAA8B;A/CuiSlC;;A+CniSE;EACE,2BAA6B;A/CsiSjC;;A+CviSE;EACE,iCAA6B;A/C0iSjC;;A+C3iSE;EACE,+BAA6B;A/C8iSjC;;A+C/iSE;EACE,6BAA6B;A/CkjSjC;;A+CnjSE;EACE,+BAA6B;A/CsjSjC;;A+CvjSE;EACE,8BAA6B;A/C0jSjC;;A+CrjSI;EACE,uBAAqC;A/CwjS3C;;A+CzjSI;EACE,uBAAqC;A/C4jS3C;;A+C7jSI;EACE,uBAAqC;A/CgkS3C;;A+CjkSI;EACE,uBAAqC;A/CokS3C;;A+CrkSI;EACE,uBAAqC;A/CwkS3C;;A+CzkSI;EACE,uBAAqC;A/C4kS3C;;A+C7kSI;EACE,yBAAqC;A/CglS3C;;A+CjlSI;EACE,yBAAqC;A/ColS3C;;A+CrlSI;EACE,yBAAqC;A/CwlS3C;;A+CzlSI;EACE,yBAAqC;A/C4lS3C;;A+C7lSI;EACE,yBAAqC;A/CgmS3C;;A+CjmSI;EACE,yBAAqC;A/ComS3C;;ACnoSE;EACE,WAAW;EACX,YAAY;EACZ,cAAc;ADsoSlB;;AgDzoSA;EACE,sBAAsB;AhD4oSxB;;AgD1oSA;EACE,uBAAuB;AhD6oSzB;;AiDppSA;EACE,2BAA2B;AjDupS7B;;AiDrpSA;EACE,2BAA2B;AjDwpS7B;;AiDtpSA;EACE,0BAA0B;AjDypS5B;;AkDhqSA;EACE,2BAA2B;AlDmqS7B;;AmDjqSA;EACE,6BAA6B;AnDoqS/B;;AoDxqSA;EACE,oBAAoB;ApD2qStB;;AoDzqSA;EACE,qBAAqB;ApD4qSvB;;AoDjqSI;EACE,oBAA+B;ApDoqSrC;;AoDjqSM;EACE,wBAA8C;ApDoqStD;;AoDrqSM;EACE,0BAA8C;ApDwqStD;;
AoDzqSM;EACE,2BAA8C;ApD4qStD;;AoD7qSM;EACE,yBAA8C;ApDgrStD;;AoD7qSM;EACE,yBAAyC;EACzC,0BAA2C;ApDgrSnD;;AoD7qSM;EACE,wBAAuC;EACvC,2BAA6C;ApDgrSrD;;AoD/rSI;EACE,0BAA+B;ApDksSrC;;AoD/rSM;EACE,8BAA8C;ApDksStD;;AoDnsSM;EACE,gCAA8C;ApDssStD;;AoDvsSM;EACE,iCAA8C;ApD0sStD;;AoD3sSM;EACE,+BAA8C;ApD8sStD;;AoD3sSM;EACE,+BAAyC;EACzC,gCAA2C;ApD8sSnD;;AoD3sSM;EACE,8BAAuC;EACvC,iCAA6C;ApD8sSrD;;AoD7tSI;EACE,yBAA+B;ApDguSrC;;AoD7tSM;EACE,6BAA8C;ApDguStD;;AoDjuSM;EACE,+BAA8C;ApDouStD;;AoDruSM;EACE,gCAA8C;ApDwuStD;;AoDzuSM;EACE,8BAA8C;ApD4uStD;;AoDzuSM;EACE,8BAAyC;EACzC,+BAA2C;ApD4uSnD;;AoDzuSM;EACE,6BAAuC;EACvC,gCAA6C;ApD4uSrD;;AoD3vSI;EACE,0BAA+B;ApD8vSrC;;AoD3vSM;EACE,8BAA8C;ApD8vStD;;AoD/vSM;EACE,gCAA8C;ApDkwStD;;AoDnwSM;EACE,iCAA8C;ApDswStD;;AoDvwSM;EACE,+BAA8C;ApD0wStD;;AoDvwSM;EACE,+BAAyC;EACzC,gCAA2C;ApD0wSnD;;AoDvwSM;EACE,8BAAuC;EACvC,iCAA6C;ApD0wSrD;;AoDzxSI;EACE,uBAA+B;ApD4xSrC;;AoDzxSM;EACE,2BAA8C;ApD4xStD;;AoD7xSM;EACE,6BAA8C;ApDgyStD;;AoDjySM;EACE,8BAA8C;ApDoyStD;;AoDrySM;EACE,4BAA8C;ApDwyStD;;AoDrySM;EACE,4BAAyC;EACzC,6BAA2C;ApDwySnD;;AoDrySM;EACE,2BAAuC;EACvC,8BAA6C;ApDwySrD;;AoDvzSI;EACE,yBAA+B;ApD0zSrC;;AoDvzSM;EACE,6BAA8C;ApD0zStD;;AoD3zSM;EACE,+BAA8C;ApD8zStD;;AoD/zSM;EACE,gCAA8C;ApDk0StD;;AoDn0SM;EACE,8BAA8C;ApDs0StD;;AoDn0SM;EACE,8BAAyC;EACzC,+BAA2C;ApDs0SnD;;AoDn0SM;EACE,6BAAuC;EACvC,gCAA6C;ApDs0SrD;;AoDr1SI;EACE,uBAA+B;ApDw1SrC;;AoDr1SM;EACE,2BAA8C;ApDw1StD;;AoDz1SM;EACE,6BAA8C;ApD41StD;;AoD71SM;EACE,8BAA8C;ApDg2StD;;AoDj2SM;EACE,4BAA8C;ApDo2StD;;AoDj2SM;EACE,4BAAyC;EACzC,6BAA2C;ApDo2SnD;;AoDj2SM;EACE,2BAAuC;EACvC,8BAA6C;ApDo2SrD;;AoDn3SI;EACE,qBAA+B;ApDs3SrC;;AoDn3SM;EACE,yBAA8C;ApDs3StD;;AoDv3SM;EACE,2BAA8C;ApD03StD;;AoD33SM;EACE,4BAA8C;ApD83StD;;AoD/3SM;EACE,0BAA8C;ApDk4StD;;AoD/3SM;EACE,0BAAyC;EACzC,2BAA2C;ApDk4SnD;;AoD/3SM;EACE,yBAAuC;EACvC,4BAA6C;ApDk4SrD;;AoDj5SI;EACE,2BAA+B;ApDo5SrC;;AoDj5SM;EACE,+BAA8C;ApDo5StD;;AoDr5SM;EACE,iCAA8C;ApDw5StD;;AoDz5SM;EACE,kCAA8C;ApD45StD;;AoD75SM;EACE,gCAA8C;ApDg6StD;;AoD75SM;EACE,gCAAyC;EACzC,iCAA2C;ApDg6SnD;;AoD75SM;E
ACE,+BAAuC;EACvC,kCAA6C;ApDg6SrD;;AoD/6SI;EACE,0BAA+B;ApDk7SrC;;AoD/6SM;EACE,8BAA8C;ApDk7StD;;AoDn7SM;EACE,gCAA8C;ApDs7StD;;AoDv7SM;EACE,iCAA8C;ApD07StD;;AoD37SM;EACE,+BAA8C;ApD87StD;;AoD37SM;EACE,+BAAyC;EACzC,gCAA2C;ApD87SnD;;AoD37SM;EACE,8BAAuC;EACvC,iCAA6C;ApD87SrD;;AoD78SI;EACE,2BAA+B;ApDg9SrC;;AoD78SM;EACE,+BAA8C;ApDg9StD;;AoDj9SM;EACE,iCAA8C;ApDo9StD;;AoDr9SM;EACE,kCAA8C;ApDw9StD;;AoDz9SM;EACE,gCAA8C;ApD49StD;;AoDz9SM;EACE,gCAAyC;EACzC,iCAA2C;ApD49SnD;;AoDz9SM;EACE,+BAAuC;EACvC,kCAA6C;ApD49SrD;;AoD3+SI;EACE,wBAA+B;ApD8+SrC;;AoD3+SM;EACE,4BAA8C;ApD8+StD;;AoD/+SM;EACE,8BAA8C;ApDk/StD;;AoDn/SM;EACE,+BAA8C;ApDs/StD;;AoDv/SM;EACE,6BAA8C;ApD0/StD;;AoDv/SM;EACE,6BAAyC;EACzC,8BAA2C;ApD0/SnD;;AoDv/SM;EACE,4BAAuC;EACvC,+BAA6C;ApD0/SrD;;AoDzgTI;EACE,0BAA+B;ApD4gTrC;;AoDzgTM;EACE,8BAA8C;ApD4gTtD;;AoD7gTM;EACE,gCAA8C;ApDghTtD;;AoDjhTM;EACE,iCAA8C;ApDohTtD;;AoDrhTM;EACE,+BAA8C;ApDwhTtD;;AoDrhTM;EACE,+BAAyC;EACzC,gCAA2C;ApDwhTnD;;AoDrhTM;EACE,8BAAuC;EACvC,iCAA6C;ApDwhTrD;;AoDviTI;EACE,wBAA+B;ApD0iTrC;;AoDviTM;EACE,4BAA8C;ApD0iTtD;;AoD3iTM;EACE,8BAA8C;ApD8iTtD;;AoD/iTM;EACE,+BAA8C;ApDkjTtD;;AoDnjTM;EACE,6BAA8C;ApDsjTtD;;AoDnjTM;EACE,6BAAyC;EACzC,8BAA2C;ApDsjTnD;;AoDnjTM;EACE,4BAAuC;EACvC,+BAA6C;ApDsjTrD;;AqDjlTI;EACE,0BAA2B;ArDolTjC;;AqDrlTI;EACE,4BAA2B;ArDwlTjC;;AqDzlTI;EACE,0BAA2B;ArD4lTjC;;AqD7lTI;EACE,4BAA2B;ArDgmTjC;;AqDjmTI;EACE,6BAA2B;ArDomTjC;;AqDrmTI;EACE,0BAA2B;ArDwmTjC;;AqDzmTI;EACE,6BAA2B;ArD4mTjC;;AC/hTE;EoD9EE;IACE,0BAA2B;ErDinT/B;EqDlnTE;IACE,4BAA2B;ErDonT/B;EqDrnTE;IACE,0BAA2B;ErDunT/B;EqDxnTE;IACE,4BAA2B;ErD0nT/B;EqD3nTE;IACE,6BAA2B;ErD6nT/B;EqD9nTE;IACE,0BAA2B;ErDgoT/B;EqDjoTE;IACE,6BAA2B;ErDmoT/B;AACF;;ACnjTE;EoDlFE;IACE,0BAA2B;ErDyoT/B;EqD1oTE;IACE,4BAA2B;ErD4oT/B;EqD7oTE;IACE,0BAA2B;ErD+oT/B;EqDhpTE;IACE,4BAA2B;ErDkpT/B;EqDnpTE;IACE,6BAA2B;ErDqpT/B;EqDtpTE;IACE,0BAA2B;ErDwpT/B;EqDzpTE;IACE,6BAA2B;ErD2pT/B;AACF;;ACnkTE;EoD1FE;IACE,0BAA2B;ErDiqT/B;EqDlqTE;IACE,4BAA2B;ErDoqT/B;EqDrqTE;IACE,0BAA2B;ErDuqT/B;EqDxqTE;IACE,4BAA2B;ErD0qT/B;EqD3qTE;IACE,6BAA2B;ErD6
qT/B;EqD9qTE;IACE,0BAA2B;ErDgrT/B;EqDjrTE;IACE,6BAA2B;ErDmrT/B;AACF;;ACvlTE;EoD9FE;IACE,0BAA2B;ErDyrT/B;EqD1rTE;IACE,4BAA2B;ErD4rT/B;EqD7rTE;IACE,0BAA2B;ErD+rT/B;EqDhsTE;IACE,4BAA2B;ErDksT/B;EqDnsTE;IACE,6BAA2B;ErDqsT/B;EqDtsTE;IACE,0BAA2B;ErDwsT/B;EqDzsTE;IACE,6BAA2B;ErD2sT/B;AACF;;AChmTI;EoD7GA;IACE,0BAA2B;ErDitT/B;EqDltTE;IACE,4BAA2B;ErDotT/B;EqDrtTE;IACE,0BAA2B;ErDutT/B;EqDxtTE;IACE,4BAA2B;ErD0tT/B;EqD3tTE;IACE,6BAA2B;ErD6tT/B;EqD9tTE;IACE,0BAA2B;ErDguT/B;EqDjuTE;IACE,6BAA2B;ErDmuT/B;AACF;;ACzmTI;EoD5HA;IACE,0BAA2B;ErDyuT/B;EqD1uTE;IACE,4BAA2B;ErD4uT/B;EqD7uTE;IACE,0BAA2B;ErD+uT/B;EqDhvTE;IACE,4BAA2B;ErDkvT/B;EqDnvTE;IACE,6BAA2B;ErDqvT/B;EqDtvTE;IACE,0BAA2B;ErDwvT/B;EqDzvTE;IACE,6BAA2B;ErD2vT/B;AACF;;AqDnuTE;EACE,6BAAqC;ArDsuTzC;;AqDvuTE;EACE,8BAAqC;ArD0uTzC;;AqD3uTE;EACE,2BAAqC;ArD8uTzC;;AqD/uTE;EACE,4BAAqC;ArDkvTzC;;AC/rTE;EoD/CE;IACE,6BAAqC;ErDkvTzC;AACF;;ACjsTE;EoDhDE;IACE,6BAAqC;ErDqvTzC;AACF;;ACnsTE;EoDjDE;IACE,6BAAqC;ErDwvTzC;AACF;;ACrsTE;EoDlDE;IACE,6BAAqC;ErD2vTzC;AACF;;ACvsTE;EoDnDE;IACE,6BAAqC;ErD8vTzC;AACF;;ACxsTI;EoDrDA;IACE,6BAAqC;ErDiwTzC;AACF;;ACpsTI;EoD5DA;IACE,6BAAqC;ErDowTzC;AACF;;ACrsTI;EoD9DA;IACE,6BAAqC;ErDuwTzC;AACF;;ACjsTI;EoDrEA;IACE,6BAAqC;ErD0wTzC;AACF;;ACrvTE;EoD/CE;IACE,8BAAqC;ErDwyTzC;AACF;;ACvvTE;EoDhDE;IACE,8BAAqC;ErD2yTzC;AACF;;ACzvTE;EoDjDE;IACE,8BAAqC;ErD8yTzC;AACF;;AC3vTE;EoDlDE;IACE,8BAAqC;ErDizTzC;AACF;;AC7vTE;EoDnDE;IACE,8BAAqC;ErDozTzC;AACF;;AC9vTI;EoDrDA;IACE,8BAAqC;ErDuzTzC;AACF;;AC1vTI;EoD5DA;IACE,8BAAqC;ErD0zTzC;AACF;;AC3vTI;EoD9DA;IACE,8BAAqC;ErD6zTzC;AACF;;ACvvTI;EoDrEA;IACE,8BAAqC;ErDg0TzC;AACF;;AC3yTE;EoD/CE;IACE,2BAAqC;ErD81TzC;AACF;;AC7yTE;EoDhDE;IACE,2BAAqC;ErDi2TzC;AACF;;AC/yTE;EoDjDE;IACE,2BAAqC;ErDo2TzC;AACF;;ACjzTE;EoDlDE;IACE,2BAAqC;ErDu2TzC;AACF;;ACnzTE;EoDnDE;IACE,2BAAqC;ErD02TzC;AACF;;ACpzTI;EoDrDA;IACE,2BAAqC;ErD62TzC;AACF;;AChzTI;EoD5DA;IACE,2BAAqC;ErDg3TzC;AACF;;ACjzTI;EoD9DA;IACE,2BAAqC;ErDm3TzC;AACF;;AC7yTI;EoDrEA;IACE,2BAAqC;ErDs3TzC;AACF;;ACj2TE;EoD/CE;IACE,4BAAqC;ErDo5TzC;AACF;;ACn2TE;EoDhDE;IACE
,4BAAqC;ErDu5TzC;AACF;;ACr2TE;EoDjDE;IACE,4BAAqC;ErD05TzC;AACF;;ACv2TE;EoDlDE;IACE,4BAAqC;ErD65TzC;AACF;;ACz2TE;EoDnDE;IACE,4BAAqC;ErDg6TzC;AACF;;AC12TI;EoDrDA;IACE,4BAAqC;ErDm6TzC;AACF;;ACt2TI;EoD5DA;IACE,4BAAqC;ErDs6TzC;AACF;;ACv2TI;EoD9DA;IACE,4BAAqC;ErDy6TzC;AACF;;ACn2TI;EoDrEA;IACE,4BAAqC;ErD46TzC;AACF;;AqD36TA;EACE,qCAAqC;ArD86TvC;;AqD56TA;EACE,oCAAoC;ArD+6TtC;;AqD76TA;EACE,oCAAoC;ArDg7TtC;;AqD96TA;EACE,6BAA6B;ArDi7T/B;;AqD/6TA;EACE,2BAAqC;ArDk7TvC;;AqDj7TA;EACE,2BAAsC;ArDo7TxC;;AqDn7TA;EACE,2BAAsC;ArDs7TxC;;AqDr7TA;EACE,2BAAwC;ArDw7T1C;;AqDv7TA;EACE,2BAAoC;ArD07TtC;;AqDx7TA;EACE,+LAAuC;ArD27TzC;;AqDz7TA;EACE,+LAAyC;ArD47T3C;;AqD17TA;EACE,+LAA0C;ArD67T5C;;AqD37TA;EACE,iCAAyC;ArD87T3C;;AqD57TA;EACE,iCAAoC;ArD+7TtC;;AsD3hUE;EACE,yBAA+B;AtD8hUnC;;ACn9TE;EqDzEE;IACE,yBAA+B;EtDgiUnC;AACF;;ACr9TE;EqD1EE;IACE,yBAA+B;EtDmiUnC;AACF;;ACv9TE;EqD3EE;IACE,yBAA+B;EtDsiUnC;AACF;;ACz9TE;EqD5EE;IACE,yBAA+B;EtDyiUnC;AACF;;AC39TE;EqD7EE;IACE,yBAA+B;EtD4iUnC;AACF;;AC59TI;EqD/EA;IACE,yBAA+B;EtD+iUnC;AACF;;ACx9TI;EqDtFA;IACE,yBAA+B;EtDkjUnC;AACF;;ACz9TI;EqDxFA;IACE,yBAA+B;EtDqjUnC;AACF;;ACr9TI;EqD/FA;IACE,yBAA+B;EtDwjUnC;AACF;;AsDrlUE;EACE,wBAA+B;AtDwlUnC;;AC7gUE;EqDzEE;IACE,wBAA+B;EtD0lUnC;AACF;;AC/gUE;EqD1EE;IACE,wBAA+B;EtD6lUnC;AACF;;ACjhUE;EqD3EE;IACE,wBAA+B;EtDgmUnC;AACF;;ACnhUE;EqD5EE;IACE,wBAA+B;EtDmmUnC;AACF;;ACrhUE;EqD7EE;IACE,wBAA+B;EtDsmUnC;AACF;;ACthUI;EqD/EA;IACE,wBAA+B;EtDymUnC;AACF;;AClhUI;EqDtFA;IACE,wBAA+B;EtD4mUnC;AACF;;ACnhUI;EqDxFA;IACE,wBAA+B;EtD+mUnC;AACF;;AC/gUI;EqD/FA;IACE,wBAA+B;EtDknUnC;AACF;;AsD/oUE;EACE,0BAA+B;AtDkpUnC;;ACvkUE;EqDzEE;IACE,0BAA+B;EtDopUnC;AACF;;ACzkUE;EqD1EE;IACE,0BAA+B;EtDupUnC;AACF;;AC3kUE;EqD3EE;IACE,0BAA+B;EtD0pUnC;AACF;;AC7kUE;EqD5EE;IACE,0BAA+B;EtD6pUnC;AACF;;AC/kUE;EqD7EE;IACE,0BAA+B;EtDgqUnC;AACF;;AChlUI;EqD/EA;IACE,0BAA+B;EtDmqUnC;AACF;;AC5kUI;EqDtFA;IACE,0BAA+B;EtDsqUnC;AACF;;AC7kUI;EqDxFA;IACE,0BAA+B;EtDyqUnC;AACF;;ACzkUI;EqD/FA;IACE,0BAA+B;EtD4qUnC;AACF;;AsDzsUE;EACE,gCAA+B;AtD4sUnC;;ACjoUE;EqDzEE;IACE,gCAA+B;EtD8sUnC;AACF;;AC
noUE;EqD1EE;IACE,gCAA+B;EtDitUnC;AACF;;ACroUE;EqD3EE;IACE,gCAA+B;EtDotUnC;AACF;;ACvoUE;EqD5EE;IACE,gCAA+B;EtDutUnC;AACF;;ACzoUE;EqD7EE;IACE,gCAA+B;EtD0tUnC;AACF;;AC1oUI;EqD/EA;IACE,gCAA+B;EtD6tUnC;AACF;;ACtoUI;EqDtFA;IACE,gCAA+B;EtDguUnC;AACF;;ACvoUI;EqDxFA;IACE,gCAA+B;EtDmuUnC;AACF;;ACnoUI;EqD/FA;IACE,gCAA+B;EtDsuUnC;AACF;;AsDnwUE;EACE,+BAA+B;AtDswUnC;;AC3rUE;EqDzEE;IACE,+BAA+B;EtDwwUnC;AACF;;AC7rUE;EqD1EE;IACE,+BAA+B;EtD2wUnC;AACF;;AC/rUE;EqD3EE;IACE,+BAA+B;EtD8wUnC;AACF;;ACjsUE;EqD5EE;IACE,+BAA+B;EtDixUnC;AACF;;ACnsUE;EqD7EE;IACE,+BAA+B;EtDoxUnC;AACF;;ACpsUI;EqD/EA;IACE,+BAA+B;EtDuxUnC;AACF;;AChsUI;EqDtFA;IACE,+BAA+B;EtD0xUnC;AACF;;ACjsUI;EqDxFA;IACE,+BAA+B;EtD6xUnC;AACF;;AC7rUI;EqD/FA;IACE,+BAA+B;EtDgyUnC;AACF;;AsD/xUA;EACE,wBAAwB;AtDkyU1B;;AsDhyUA;EACE,uBAAuB;EACvB,iCAAiC;EACjC,yBAAyB;EACzB,2BAA2B;EAC3B,qBAAqB;EACrB,6BAA6B;EAC7B,8BAA8B;EAC9B,wBAAwB;AtDmyU1B;;AChwUE;EqDhCA;IACE,wBAAwB;EtDoyU1B;AACF;;AClwUE;EqDhCA;IACE,wBAAwB;EtDsyU1B;AACF;;ACpwUE;EqDhCA;IACE,wBAAwB;EtDwyU1B;AACF;;ACtwUE;EqDhCA;IACE,wBAAwB;EtD0yU1B;AACF;;ACxwUE;EqDhCA;IACE,wBAAwB;EtD4yU1B;AACF;;ACzwUI;EqDjCF;IACE,wBAAwB;EtD8yU1B;AACF;;ACrwUI;EqDvCF;IACE,wBAAwB;EtDgzU1B;AACF;;ACtwUI;EqDxCF;IACE,wBAAwB;EtDkzU1B;AACF;;AClwUI;EqD9CF;IACE,wBAAwB;EtDozU1B;AACF;;AsDnzUA;EACE,6BAA6B;AtDszU/B;;AC1zUE;EqDOA;IACE,6BAA6B;EtDuzU/B;AACF;;AC5zUE;EqDOA;IACE,6BAA6B;EtDyzU/B;AACF;;AC9zUE;EqDOA;IACE,6BAA6B;EtD2zU/B;AACF;;ACh0UE;EqDOA;IACE,6BAA6B;EtD6zU/B;AACF;;ACl0UE;EqDOA;IACE,6BAA6B;EtD+zU/B;AACF;;ACn0UI;EqDMF;IACE,6BAA6B;EtDi0U/B;AACF;;AC/zUI;EqDAF;IACE,6BAA6B;EtDm0U/B;AACF;;ACh0UI;EqDDF;IACE,6BAA6B;EtDq0U/B;AACF;;AC5zUI;EqDPF;IACE,6BAA6B;EtDu0U/B;AACF;;AuDj8UA,iBAAA;ACQA;EACE,oBAAoB;EACpB,aAAa;EACb,sBAAsB;EACtB,8BAA8B;AxD67UhC;;AwDj8UA;EAMI,gBAAgB;AxD+7UpB;;AwDr8UA;EASM,mBAAmB;AxDg8UzB;;AwDz8UA;EAeM,uBtDRyB;EsDSzB,ctDtBuB;AFo9U7B;;AwD98UA;;EAmBQ,cAAc;AxDg8UtB;;AwDn9UA;EAqBQ,ctD3BqB;AF69U7B;;AwDv9UA;EAuBQ,4BtD7BqB;AFi+U7B;;AwD39UA;;EA0BU,ctDhCmB;AFs+U7B;;AC34UE;EuDrFF;IA6BU,uBtDtBqB;EF89U7B;AACF;;AwDt+UA;;EAgCQ,
4BtDtCqB;AFi/U7B;;AwD3+UA;;;EAqCU,yB7CgEuB;E6C/DvB,ctD5CmB;AFw/U7B;;AwDl/UA;EAyCU,ctD/CmB;EsDgDnB,YAAY;AxD68UtB;;AwDv/UA;EA4CY,UAAU;AxD+8UtB;;AwD3/UA;EA+CY,UAAU;AxDg9UtB;;AwD//UA;EAmDY,ctDzDiB;AFygV7B;;AwDngVA;EAqDc,uCtD3De;AF6gV7B;;AwDvgVA;EAyDc,yBtD/De;EsDgEf,qBtDhEe;EsDiEf,YtDpDiB;AFsgV/B;;AwD7gVA;EAiEU,4EAAyG;AxDg9UnH;;ACx8UE;EuDzEF;IAoEc,4EAAyG;ExDk9UrH;AACF;;AwDvhVA;EAeM,yBtDrBuB;EsDsBvB,YtDTyB;AFqhV/B;;AwD5hVA;;EAmBQ,cAAc;AxD8gVtB;;AwDjiVA;EAqBQ,YtDduB;AF8hV/B;;AwDriVA;EAuBQ,+BtDhBuB;AFkiV/B;;AwDziVA;;EA0BU,YtDnBqB;AFuiV/B;;ACz9UE;EuDrFF;IA6BU,yBtDnCmB;EFyjV3B;AACF;;AwDpjVA;;EAgCQ,+BtDzBuB;AFkjV/B;;AwDzjVA;;;EAqCU,uB7CgEuB;E6C/DvB,YtD/BqB;AFyjV/B;;AwDhkVA;EAyCU,YtDlCqB;EsDmCrB,YAAY;AxD2hVtB;;AwDrkVA;EA4CY,UAAU;AxD6hVtB;;AwDzkVA;EA+CY,UAAU;AxD8hVtB;;AwD7kVA;EAmDY,YtD5CmB;AF0kV/B;;AwDjlVA;EAqDc,uCtD3De;AF2lV7B;;AwDrlVA;EAyDc,uBtDlDiB;EsDmDjB,mBtDnDiB;EsDoDjB,ctDjEe;AFimV7B;;AwD3lVA;EAiEU,8EAAyG;AxD8hVnH;;ACthVE;EuDzEF;IAoEc,8EAAyG;ExDgiVrH;AACF;;AwDrmVA;EAeM,4BtDVwB;EsDWxB,yB7CwDe;AXkiVrB;;AwD1mVA;;EAmBQ,cAAc;AxD4lVtB;;AwD/mVA;EAqBQ,yB7CmDa;AX2iVrB;;AwDnnVA;EAuBQ,yB7CiDa;AX+iVrB;;AwDvnVA;;EA0BU,yB7C8CW;AXojVrB;;ACviVE;EuDrFF;IA6BU,4BtDxBoB;EF4nV5B;AACF;;AwDloVA;;EAgCQ,yB7CwCa;AX+jVrB;;AwDvoVA;;;EAqCU,yB7CgEuB;E6C/DvB,yB7CkCW;AXskVrB;;AwD9oVA;EAyCU,yB7C+BW;E6C9BX,YAAY;AxDymVtB;;AwDnpVA;EA4CY,UAAU;AxD2mVtB;;AwDvpVA;EA+CY,UAAU;AxD4mVtB;;AwD3pVA;EAmDY,yB7CqBS;AXulVrB;;AwD/pVA;EAqDc,uCtD3De;AFyqV7B;;AwDnqVA;EAyDc,oC7CeO;E6CdP,gC7CcO;E6CbP,iBtDtDgB;AFoqV9B;;AwDzqVA;EAiEU,iFAAyG;AxD4mVnH;;ACpmVE;EuDzEF;IAoEc,iFAAyG;ExD8mVrH;AACF;;AwDnrVA;EAeM,yBtDjBwB;EsDkBxB,W7C0DU;AX8mVhB;;AwDxrVA;;EAmBQ,cAAc;AxD0qVtB;;AwD7rVA;EAqBQ,W7CqDQ;AXunVhB;;AwDjsVA;EAuBQ,+B7CmDQ;AX2nVhB;;AwDrsVA;;EA0BU,W7CgDM;AXgoVhB;;ACrnVE;EuDrFF;IA6BU,yBtD/BoB;EFitV5B;AACF;;AwDhtVA;;EAgCQ,+B7C0CQ;AX2oVhB;;AwDrtVA;;;EAqCU,yB7CgEuB;E6C/DvB,W7CoCM;AXkpVhB;;AwD5tVA;EAyCU,W7CiCM;E6ChCN,YAAY;AxDurVtB;;AwDjuVA;EA4CY,UAAU;AxDyrVtB;;AwDruVA;EA+CY,UAAU;AxD0rVtB;;AwDzuVA;EAmDY,W7CuBI;AXmqVhB;;AwD7uVA;EAqDc,uCtD3De;A
FuvV7B;;AwDjvVA;EAyDc,sB7CiBE;E6ChBF,kB7CgBE;E6CfF,ctD7DgB;AFyvV9B;;AwDvvVA;EAiEU,gFAAyG;AxD0rVnH;;AClrVE;EuDzEF;IAoEc,gFAAyG;ExD4rVrH;AACF;;AwDjwVA;EAeM,yBtDH4B;EsDI5B,W7C0DU;AX4rVhB;;AwDtwVA;;EAmBQ,cAAc;AxDwvVtB;;AwD3wVA;EAqBQ,W7CqDQ;AXqsVhB;;AwD/wVA;EAuBQ,+B7CmDQ;AXysVhB;;AwDnxVA;;EA0BU,W7CgDM;AX8sVhB;;ACnsVE;EuDrFF;IA6BU,yBtDjBwB;EFixVhC;AACF;;AwD9xVA;;EAgCQ,+B7C0CQ;AXytVhB;;AwDnyVA;;;EAqCU,yB7CgEuB;E6C/DvB,W7CoCM;AXguVhB;;AwD1yVA;EAyCU,W7CiCM;E6ChCN,YAAY;AxDqwVtB;;AwD/yVA;EA4CY,UAAU;AxDuwVtB;;AwDnzVA;EA+CY,UAAU;AxDwwVtB;;AwDvzVA;EAmDY,W7CuBI;AXivVhB;;AwD3zVA;EAqDc,uCtD3De;AFq0V7B;;AwD/zVA;EAyDc,sB7CiBE;E6ChBF,kB7CgBE;E6CfF,ctD/CoB;AFyzVlC;;AwDr0VA;EAiEU,gFAAyG;AxDwwVnH;;AChwVE;EuDzEF;IAoEc,gFAAyG;ExD0wVrH;AACF;;AwD/0VA;EAeM,yBtDD4B;EsDE5B,W7C0DU;AX0wVhB;;AwDp1VA;;EAmBQ,cAAc;AxDs0VtB;;AwDz1VA;EAqBQ,W7CqDQ;AXmxVhB;;AwD71VA;EAuBQ,+B7CmDQ;AXuxVhB;;AwDj2VA;;EA0BU,W7CgDM;AX4xVhB;;ACjxVE;EuDrFF;IA6BU,yBtDfwB;EF61VhC;AACF;;AwD52VA;;EAgCQ,+B7C0CQ;AXuyVhB;;AwDj3VA;;;EAqCU,yB7CgEuB;E6C/DvB,W7CoCM;AX8yVhB;;AwDx3VA;EAyCU,W7CiCM;E6ChCN,YAAY;AxDm1VtB;;AwD73VA;EA4CY,UAAU;AxDq1VtB;;AwDj4VA;EA+CY,UAAU;AxDs1VtB;;AwDr4VA;EAmDY,W7CuBI;AX+zVhB;;AwDz4VA;EAqDc,uCtD3De;AFm5V7B;;AwD74VA;EAyDc,sB7CiBE;E6ChBF,kB7CgBE;E6CfF,ctD7CoB;AFq4VlC;;AwDn5VA;EAiEU,gFAAyG;AxDs1VnH;;AC90VE;EuDzEF;IAoEc,gFAAyG;ExDw1VrH;AACF;;AwD75VA;EAeM,yBtDF4B;EsDG5B,W7C0DU;AXw1VhB;;AwDl6VA;;EAmBQ,cAAc;AxDo5VtB;;AwDv6VA;EAqBQ,W7CqDQ;AXi2VhB;;AwD36VA;EAuBQ,+B7CmDQ;AXq2VhB;;AwD/6VA;;EA0BU,W7CgDM;AX02VhB;;AC/1VE;EuDrFF;IA6BU,yBtDhBwB;EF46VhC;AACF;;AwD17VA;;EAgCQ,+B7C0CQ;AXq3VhB;;AwD/7VA;;;EAqCU,yB7CgEuB;E6C/DvB,W7CoCM;AX43VhB;;AwDt8VA;EAyCU,W7CiCM;E6ChCN,YAAY;AxDi6VtB;;AwD38VA;EA4CY,UAAU;AxDm6VtB;;AwD/8VA;EA+CY,UAAU;AxDo6VtB;;AwDn9VA;EAmDY,W7CuBI;AX64VhB;;AwDv9VA;EAqDc,uCtD3De;AFi+V7B;;AwD39VA;EAyDc,sB7CiBE;E6ChBF,kB7CgBE;E6CfF,ctD9CoB;AFo9VlC;;AwDj+VA;EAiEU,gFAAyG;AxDo6VnH;;AC55VE;EuDzEF;IAoEc,gFAAyG;ExDs6VrH;AACF;;AwD3+VA;EAeM,yBtDJ4B;EsDK5B,W7C0DU;AXs6VhB;;AwDh/VA;;EAmBQ,cAAc;AxDk+VtB;;AwDr/VA;EAqBQ,W7CqDQ;AX+6VhB;;A
wDz/VA;EAuBQ,+B7CmDQ;AXm7VhB;;AwD7/VA;;EA0BU,W7CgDM;AXw7VhB;;AC76VE;EuDrFF;IA6BU,yBtDlBwB;EF4/VhC;AACF;;AwDxgWA;;EAgCQ,+B7C0CQ;AXm8VhB;;AwD7gWA;;;EAqCU,yB7CgEuB;E6C/DvB,W7CoCM;AX08VhB;;AwDphWA;EAyCU,W7CiCM;E6ChCN,YAAY;AxD++VtB;;AwDzhWA;EA4CY,UAAU;AxDi/VtB;;AwD7hWA;EA+CY,UAAU;AxDk/VtB;;AwDjiWA;EAmDY,W7CuBI;AX29VhB;;AwDriWA;EAqDc,uCtD3De;AF+iW7B;;AwDziWA;EAyDc,sB7CiBE;E6ChBF,kB7CgBE;E6CfF,ctDhDoB;AFoiWlC;;AwD/iWA;EAiEU,gFAAyG;AxDk/VnH;;AC1+VE;EuDzEF;IAoEc,gFAAyG;ExDo/VrH;AACF;;AwDzjWA;EAeM,yBtDL4B;EsDM5B,yB7CwDe;AXs/VrB;;AwD9jWA;;EAmBQ,cAAc;AxDgjWtB;;AwDnkWA;EAqBQ,yB7CmDa;AX+/VrB;;AwDvkWA;EAuBQ,yB7CiDa;AXmgWrB;;AwD3kWA;;EA0BU,yB7C8CW;AXwgWrB;;AC3/VE;EuDrFF;IA6BU,yBtDnBwB;EF2kWhC;AACF;;AwDtlWA;;EAgCQ,yB7CwCa;AXmhWrB;;AwD3lWA;;;EAqCU,yB7CgEuB;E6C/DvB,yB7CkCW;AX0hWrB;;AwDlmWA;EAyCU,yB7C+BW;E6C9BX,YAAY;AxD6jWtB;;AwDvmWA;EA4CY,UAAU;AxD+jWtB;;AwD3mWA;EA+CY,UAAU;AxDgkWtB;;AwD/mWA;EAmDY,yB7CqBS;AX2iWrB;;AwDnnWA;EAqDc,uCtD3De;AF6nW7B;;AwDvnWA;EAyDc,oC7CeO;E6CdP,gC7CcO;E6CbP,ctDjDoB;AFmnWlC;;AwD7nWA;EAiEU,gFAAyG;AxDgkWnH;;ACxjWE;EuDzEF;IAoEc,gFAAyG;ExDkkWrH;AACF;;AwDvoWA;EAeM,yBtDC2B;EsDA3B,W7C0DU;AXkkWhB;;AwD5oWA;;EAmBQ,cAAc;AxD8nWtB;;AwDjpWA;EAqBQ,W7CqDQ;AX2kWhB;;AwDrpWA;EAuBQ,+B7CmDQ;AX+kWhB;;AwDzpWA;;EA0BU,W7CgDM;AXolWhB;;ACzkWE;EuDrFF;IA6BU,yBtDbuB;EFmpW/B;AACF;;AwDpqWA;;EAgCQ,+B7C0CQ;AX+lWhB;;AwDzqWA;;;EAqCU,yB7CgEuB;E6C/DvB,W7CoCM;AXsmWhB;;AwDhrWA;EAyCU,W7CiCM;E6ChCN,YAAY;AxD2oWtB;;AwDrrWA;EA4CY,UAAU;AxD6oWtB;;AwDzrWA;EA+CY,UAAU;AxD8oWtB;;AwD7rWA;EAmDY,W7CuBI;AXunWhB;;AwDjsWA;EAqDc,uCtD3De;AF2sW7B;;AwDrsWA;EAyDc,sB7CiBE;E6ChBF,kB7CgBE;E6CfF,ctD3CmB;AF2rWjC;;AwD3sWA;EAiEU,gFAAyG;AxD8oWnH;;ACtoWE;EuDzEF;IAoEc,gFAAyG;ExDgpWrH;AACF;;AwDrtWA;EAwEM,eA/E0B;AxDguWhC;;AC5oWE;EuD7EF;IA4EQ,oBAlF8B;ExDouWpC;AACF;;AClpWE;EuD7EF;IAgFQ,qBArF8B;ExDyuWpC;AACF;;AwDruWA;EAqFM,mBAAmB;EACnB,aAAa;AxDopWnB;;AwD1uWA;EAwFQ,YAAY;EACZ,cAAc;AxDspWtB;;AwD/uWA;EA2FI,gBAAgB;AxDwpWpB;;AwDnvWA;EA6FI,iBAAiB;AxD0pWrB;;AwDtpWA;EAEE,gBAAgB;AxDwpWlB;;AwD1pWA;EAII,SAAS;EACT,gBAAgB;EAChB,eAAe;EACf,kBAAkB;EAClB
,QAAQ;EACR,qCAAqC;AxD0pWzC;;AwDnqWA;EAYI,YAAY;AxD2pWhB;;AC/rWE;EuDwBF;IAeI,aAAa;ExD6pWf;AACF;;AwD5pWA;EACE,kBAAkB;AxD+pWpB;;ACzsWE;EuDyCF;IAKM,aAAa;ExDgqWjB;EwDrqWF;IAOQ,sBAAsB;ExDiqW5B;AACF;;AC9sWE;EuDqCF;IASI,aAAa;IACb,uBAAuB;ExDqqWzB;EwD/qWF;IvDsBI,oBuDVwC;ExDsqW1C;AACF;;AwDnqWA;;EAEE,YAAY;EACZ,cAAc;AxDsqWhB;;AwDpqWA;EACE,YAAY;EACZ,cAAc;EACd,oBAlJ6B;AxDyzW/B;;AyDrzWA;EACE,oBAL2B;AzD6zW7B;;AC5tWE;EwD7FF;IAMM,oBAT8B;EzDi0WlC;EyD9zWF;IAQM,qBAV8B;EzDm0WlC;AACF;;A0Dl0WA;EACE,yBxDS4B;EwDR5B,yBAJ+B;A1Dy0WjC","file":"bulma.css"} \ No newline at end of file diff --git a/docs/static/css/bulma.min.css b/docs/static/css/bulma.min.css new file mode 100644 index 0000000..a807a31 --- /dev/null +++ b/docs/static/css/bulma.min.css @@ -0,0 +1 @@ +/*! bulma.io v0.9.1 | MIT License | github.com/jgthms/bulma */@-webkit-keyframes spinAround{from{transform:rotate(0)}to{transform:rotate(359deg)}}@keyframes spinAround{from{transform:rotate(0)}to{transform:rotate(359deg)}}.breadcrumb,.button,.delete,.file,.is-unselectable,.modal-close,.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous,.tabs{-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.navbar-link:not(.is-arrowless)::after,.select:not(.is-multiple):not(.is-loading)::after{border:3px solid transparent;border-radius:2px;border-right:0;border-top:0;content:" 
";display:block;height:.625em;margin-top:-.4375em;pointer-events:none;position:absolute;top:50%;transform:rotate(-45deg);transform-origin:center;width:.625em}.block:not(:last-child),.box:not(:last-child),.breadcrumb:not(:last-child),.content:not(:last-child),.highlight:not(:last-child),.level:not(:last-child),.message:not(:last-child),.notification:not(:last-child),.pagination:not(:last-child),.progress:not(:last-child),.subtitle:not(:last-child),.table-container:not(:last-child),.table:not(:last-child),.tabs:not(:last-child),.title:not(:last-child){margin-bottom:1.5rem}.delete,.modal-close{-moz-appearance:none;-webkit-appearance:none;background-color:rgba(10,10,10,.2);border:none;border-radius:290486px;cursor:pointer;pointer-events:auto;display:inline-block;flex-grow:0;flex-shrink:0;font-size:0;height:20px;max-height:20px;max-width:20px;min-height:20px;min-width:20px;outline:0;position:relative;vertical-align:top;width:20px}.delete::after,.delete::before,.modal-close::after,.modal-close::before{background-color:#fff;content:"";display:block;left:50%;position:absolute;top:50%;transform:translateX(-50%) translateY(-50%) rotate(45deg);transform-origin:center center}.delete::before,.modal-close::before{height:2px;width:50%}.delete::after,.modal-close::after{height:50%;width:2px}.delete:focus,.delete:hover,.modal-close:focus,.modal-close:hover{background-color:rgba(10,10,10,.3)}.delete:active,.modal-close:active{background-color:rgba(10,10,10,.4)}.is-small.delete,.is-small.modal-close{height:16px;max-height:16px;max-width:16px;min-height:16px;min-width:16px;width:16px}.is-medium.delete,.is-medium.modal-close{height:24px;max-height:24px;max-width:24px;min-height:24px;min-width:24px;width:24px}.is-large.delete,.is-large.modal-close{height:32px;max-height:32px;max-width:32px;min-height:32px;min-width:32px;width:32px}.button.is-loading::after,.control.is-loading::after,.loader,.select.is-loading::after{-webkit-animation:spinAround .5s infinite linear;animation:spinAround 
.5s infinite linear;border:2px solid #dbdbdb;border-radius:290486px;border-right-color:transparent;border-top-color:transparent;content:"";display:block;height:1em;position:relative;width:1em}.hero-video,.image.is-16by9 .has-ratio,.image.is-16by9 img,.image.is-1by1 .has-ratio,.image.is-1by1 img,.image.is-1by2 .has-ratio,.image.is-1by2 img,.image.is-1by3 .has-ratio,.image.is-1by3 img,.image.is-2by1 .has-ratio,.image.is-2by1 img,.image.is-2by3 .has-ratio,.image.is-2by3 img,.image.is-3by1 .has-ratio,.image.is-3by1 img,.image.is-3by2 .has-ratio,.image.is-3by2 img,.image.is-3by4 .has-ratio,.image.is-3by4 img,.image.is-3by5 .has-ratio,.image.is-3by5 img,.image.is-4by3 .has-ratio,.image.is-4by3 img,.image.is-4by5 .has-ratio,.image.is-4by5 img,.image.is-5by3 .has-ratio,.image.is-5by3 img,.image.is-5by4 .has-ratio,.image.is-5by4 img,.image.is-9by16 .has-ratio,.image.is-9by16 img,.image.is-square .has-ratio,.image.is-square img,.is-overlay,.modal,.modal-background{bottom:0;left:0;position:absolute;right:0;top:0}.button,.file-cta,.file-name,.input,.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous,.select select,.textarea{-moz-appearance:none;-webkit-appearance:none;align-items:center;border:1px solid transparent;border-radius:4px;box-shadow:none;display:inline-flex;font-size:1rem;height:2.5em;justify-content:flex-start;line-height:1.5;padding-bottom:calc(.5em - 1px);padding-left:calc(.75em - 1px);padding-right:calc(.75em - 1px);padding-top:calc(.5em - 
1px);position:relative;vertical-align:top}.button:active,.button:focus,.file-cta:active,.file-cta:focus,.file-name:active,.file-name:focus,.input:active,.input:focus,.is-active.button,.is-active.file-cta,.is-active.file-name,.is-active.input,.is-active.pagination-ellipsis,.is-active.pagination-link,.is-active.pagination-next,.is-active.pagination-previous,.is-active.textarea,.is-focused.button,.is-focused.file-cta,.is-focused.file-name,.is-focused.input,.is-focused.pagination-ellipsis,.is-focused.pagination-link,.is-focused.pagination-next,.is-focused.pagination-previous,.is-focused.textarea,.pagination-ellipsis:active,.pagination-ellipsis:focus,.pagination-link:active,.pagination-link:focus,.pagination-next:active,.pagination-next:focus,.pagination-previous:active,.pagination-previous:focus,.select select.is-active,.select select.is-focused,.select select:active,.select select:focus,.textarea:active,.textarea:focus{outline:0}.button[disabled],.file-cta[disabled],.file-name[disabled],.input[disabled],.pagination-ellipsis[disabled],.pagination-link[disabled],.pagination-next[disabled],.pagination-previous[disabled],.select fieldset[disabled] select,.select select[disabled],.textarea[disabled],fieldset[disabled] .button,fieldset[disabled] .file-cta,fieldset[disabled] .file-name,fieldset[disabled] .input,fieldset[disabled] .pagination-ellipsis,fieldset[disabled] .pagination-link,fieldset[disabled] .pagination-next,fieldset[disabled] .pagination-previous,fieldset[disabled] .select select,fieldset[disabled] .textarea{cursor:not-allowed}/*! 
minireset.css v0.0.6 | MIT License | github.com/jgthms/minireset.css */blockquote,body,dd,dl,dt,fieldset,figure,h1,h2,h3,h4,h5,h6,hr,html,iframe,legend,li,ol,p,pre,textarea,ul{margin:0;padding:0}h1,h2,h3,h4,h5,h6{font-size:100%;font-weight:400}ul{list-style:none}button,input,select,textarea{margin:0}html{box-sizing:border-box}*,::after,::before{box-sizing:inherit}img,video{height:auto;max-width:100%}iframe{border:0}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}td:not([align]),th:not([align]){text-align:inherit}html{background-color:#fff;font-size:16px;-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;min-width:300px;overflow-x:hidden;overflow-y:scroll;text-rendering:optimizeLegibility;-webkit-text-size-adjust:100%;-moz-text-size-adjust:100%;-ms-text-size-adjust:100%;text-size-adjust:100%}article,aside,figure,footer,header,hgroup,section{display:block}body,button,input,optgroup,select,textarea{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif}code,pre{-moz-osx-font-smoothing:auto;-webkit-font-smoothing:auto;font-family:monospace}body{color:#4a4a4a;font-size:1em;font-weight:400;line-height:1.5}a{color:#3273dc;cursor:pointer;text-decoration:none}a strong{color:currentColor}a:hover{color:#363636}code{background-color:#f5f5f5;color:#da1039;font-size:.875em;font-weight:400;padding:.25em .5em .25em}hr{background-color:#f5f5f5;border:none;display:block;height:2px;margin:1.5rem 0}img{height:auto;max-width:100%}input[type=checkbox],input[type=radio]{vertical-align:baseline}small{font-size:.875em}span{font-style:inherit;font-weight:inherit}strong{color:#363636;font-weight:700}fieldset{border:none}pre{-webkit-overflow-scrolling:touch;background-color:#f5f5f5;color:#4a4a4a;font-size:.875em;overflow-x:auto;padding:1.25rem 1.5rem;white-space:pre;word-wrap:normal}pre 
code{background-color:transparent;color:currentColor;font-size:1em;padding:0}table td,table th{vertical-align:top}table td:not([align]),table th:not([align]){text-align:inherit}table th{color:#363636}.box{background-color:#fff;border-radius:6px;box-shadow:0 .5em 1em -.125em rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.02);color:#4a4a4a;display:block;padding:1.25rem}a.box:focus,a.box:hover{box-shadow:0 .5em 1em -.125em rgba(10,10,10,.1),0 0 0 1px #3273dc}a.box:active{box-shadow:inset 0 1px 2px rgba(10,10,10,.2),0 0 0 1px #3273dc}.button{background-color:#fff;border-color:#dbdbdb;border-width:1px;color:#363636;cursor:pointer;justify-content:center;padding-bottom:calc(.5em - 1px);padding-left:1em;padding-right:1em;padding-top:calc(.5em - 1px);text-align:center;white-space:nowrap}.button strong{color:inherit}.button .icon,.button .icon.is-large,.button .icon.is-medium,.button .icon.is-small{height:1.5em;width:1.5em}.button .icon:first-child:not(:last-child){margin-left:calc(-.5em - 1px);margin-right:.25em}.button .icon:last-child:not(:first-child){margin-left:.25em;margin-right:calc(-.5em - 1px)}.button .icon:first-child:last-child{margin-left:calc(-.5em - 1px);margin-right:calc(-.5em - 1px)}.button.is-hovered,.button:hover{border-color:#b5b5b5;color:#363636}.button.is-focused,.button:focus{border-color:#3273dc;color:#363636}.button.is-focused:not(:active),.button:focus:not(:active){box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.button.is-active,.button:active{border-color:#4a4a4a;color:#363636}.button.is-text{background-color:transparent;border-color:transparent;color:#4a4a4a;text-decoration:underline}.button.is-text.is-focused,.button.is-text.is-hovered,.button.is-text:focus,.button.is-text:hover{background-color:#f5f5f5;color:#363636}.button.is-text.is-active,.button.is-text:active{background-color:#e8e8e8;color:#363636}.button.is-text[disabled],fieldset[disabled] 
.button.is-text{background-color:transparent;border-color:transparent;box-shadow:none}.button.is-white{background-color:#fff;border-color:transparent;color:#0a0a0a}.button.is-white.is-hovered,.button.is-white:hover{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}.button.is-white.is-focused,.button.is-white:focus{border-color:transparent;color:#0a0a0a}.button.is-white.is-focused:not(:active),.button.is-white:focus:not(:active){box-shadow:0 0 0 .125em rgba(255,255,255,.25)}.button.is-white.is-active,.button.is-white:active{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}.button.is-white[disabled],fieldset[disabled] .button.is-white{background-color:#fff;border-color:transparent;box-shadow:none}.button.is-white.is-inverted{background-color:#0a0a0a;color:#fff}.button.is-white.is-inverted.is-hovered,.button.is-white.is-inverted:hover{background-color:#000}.button.is-white.is-inverted[disabled],fieldset[disabled] .button.is-white.is-inverted{background-color:#0a0a0a;border-color:transparent;box-shadow:none;color:#fff}.button.is-white.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-white.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-white.is-outlined.is-focused,.button.is-white.is-outlined.is-hovered,.button.is-white.is-outlined:focus,.button.is-white.is-outlined:hover{background-color:#fff;border-color:#fff;color:#0a0a0a}.button.is-white.is-outlined.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-white.is-outlined.is-loading.is-focused::after,.button.is-white.is-outlined.is-loading.is-hovered::after,.button.is-white.is-outlined.is-loading:focus::after,.button.is-white.is-outlined.is-loading:hover::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-white.is-outlined[disabled],fieldset[disabled] 
.button.is-white.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}.button.is-white.is-inverted.is-outlined.is-focused,.button.is-white.is-inverted.is-outlined.is-hovered,.button.is-white.is-inverted.is-outlined:focus,.button.is-white.is-inverted.is-outlined:hover{background-color:#0a0a0a;color:#fff}.button.is-white.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-white.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-white.is-inverted.is-outlined.is-loading:focus::after,.button.is-white.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-white.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}.button.is-black{background-color:#0a0a0a;border-color:transparent;color:#fff}.button.is-black.is-hovered,.button.is-black:hover{background-color:#040404;border-color:transparent;color:#fff}.button.is-black.is-focused,.button.is-black:focus{border-color:transparent;color:#fff}.button.is-black.is-focused:not(:active),.button.is-black:focus:not(:active){box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.button.is-black.is-active,.button.is-black:active{background-color:#000;border-color:transparent;color:#fff}.button.is-black[disabled],fieldset[disabled] .button.is-black{background-color:#0a0a0a;border-color:transparent;box-shadow:none}.button.is-black.is-inverted{background-color:#fff;color:#0a0a0a}.button.is-black.is-inverted.is-hovered,.button.is-black.is-inverted:hover{background-color:#f2f2f2}.button.is-black.is-inverted[disabled],fieldset[disabled] .button.is-black.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#0a0a0a}.button.is-black.is-loading::after{border-color:transparent transparent 
#fff #fff!important}.button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}.button.is-black.is-outlined.is-focused,.button.is-black.is-outlined.is-hovered,.button.is-black.is-outlined:focus,.button.is-black.is-outlined:hover{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.button.is-black.is-outlined.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-black.is-outlined.is-loading.is-focused::after,.button.is-black.is-outlined.is-loading.is-hovered::after,.button.is-black.is-outlined.is-loading:focus::after,.button.is-black.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-black.is-outlined[disabled],fieldset[disabled] .button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}.button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-black.is-inverted.is-outlined.is-focused,.button.is-black.is-inverted.is-outlined.is-hovered,.button.is-black.is-inverted.is-outlined:focus,.button.is-black.is-inverted.is-outlined:hover{background-color:#fff;color:#0a0a0a}.button.is-black.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-black.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-black.is-inverted.is-outlined.is-loading:focus::after,.button.is-black.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-black.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-light{background-color:#f5f5f5;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-light.is-hovered,.button.is-light:hover{background-color:#eee;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-light.is-focused,.button.is-light:focus{border-color:transparent;color:rgba(0,0,0,.7)}.button.is-light.is-focused:not(:active),.button.is-light:focus:not(:active){box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.button.is-light.is-active,.button.is-light:active{background-color:#e8e8e8;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-light[disabled],fieldset[disabled] .button.is-light{background-color:#f5f5f5;border-color:transparent;box-shadow:none}.button.is-light.is-inverted{background-color:rgba(0,0,0,.7);color:#f5f5f5}.button.is-light.is-inverted.is-hovered,.button.is-light.is-inverted:hover{background-color:rgba(0,0,0,.7)}.button.is-light.is-inverted[disabled],fieldset[disabled] .button.is-light.is-inverted{background-color:rgba(0,0,0,.7);border-color:transparent;box-shadow:none;color:#f5f5f5}.button.is-light.is-loading::after{border-color:transparent transparent rgba(0,0,0,.7) rgba(0,0,0,.7)!important}.button.is-light.is-outlined{background-color:transparent;border-color:#f5f5f5;color:#f5f5f5}.button.is-light.is-outlined.is-focused,.button.is-light.is-outlined.is-hovered,.button.is-light.is-outlined:focus,.button.is-light.is-outlined:hover{background-color:#f5f5f5;border-color:#f5f5f5;color:rgba(0,0,0,.7)}.button.is-light.is-outlined.is-loading::after{border-color:transparent transparent #f5f5f5 #f5f5f5!important}.button.is-light.is-outlined.is-loading.is-focused::after,.button.is-light.is-outlined.is-loading.is-hovered::after,.button.is-light.is-outlined.is-loading:focus::after,.button.is-light.is-outlined.is-loading:hover::after{border-color:transparent transparent rgba(0,0,0,.7) 
rgba(0,0,0,.7)!important}.button.is-light.is-outlined[disabled],fieldset[disabled] .button.is-light.is-outlined{background-color:transparent;border-color:#f5f5f5;box-shadow:none;color:#f5f5f5}.button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,.7);color:rgba(0,0,0,.7)}.button.is-light.is-inverted.is-outlined.is-focused,.button.is-light.is-inverted.is-outlined.is-hovered,.button.is-light.is-inverted.is-outlined:focus,.button.is-light.is-inverted.is-outlined:hover{background-color:rgba(0,0,0,.7);color:#f5f5f5}.button.is-light.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-light.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-light.is-inverted.is-outlined.is-loading:focus::after,.button.is-light.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #f5f5f5 #f5f5f5!important}.button.is-light.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,.7);box-shadow:none;color:rgba(0,0,0,.7)}.button.is-dark{background-color:#363636;border-color:transparent;color:#fff}.button.is-dark.is-hovered,.button.is-dark:hover{background-color:#2f2f2f;border-color:transparent;color:#fff}.button.is-dark.is-focused,.button.is-dark:focus{border-color:transparent;color:#fff}.button.is-dark.is-focused:not(:active),.button.is-dark:focus:not(:active){box-shadow:0 0 0 .125em rgba(54,54,54,.25)}.button.is-dark.is-active,.button.is-dark:active{background-color:#292929;border-color:transparent;color:#fff}.button.is-dark[disabled],fieldset[disabled] .button.is-dark{background-color:#363636;border-color:transparent;box-shadow:none}.button.is-dark.is-inverted{background-color:#fff;color:#363636}.button.is-dark.is-inverted.is-hovered,.button.is-dark.is-inverted:hover{background-color:#f2f2f2}.button.is-dark.is-inverted[disabled],fieldset[disabled] 
.button.is-dark.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#363636}.button.is-dark.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-dark.is-outlined{background-color:transparent;border-color:#363636;color:#363636}.button.is-dark.is-outlined.is-focused,.button.is-dark.is-outlined.is-hovered,.button.is-dark.is-outlined:focus,.button.is-dark.is-outlined:hover{background-color:#363636;border-color:#363636;color:#fff}.button.is-dark.is-outlined.is-loading::after{border-color:transparent transparent #363636 #363636!important}.button.is-dark.is-outlined.is-loading.is-focused::after,.button.is-dark.is-outlined.is-loading.is-hovered::after,.button.is-dark.is-outlined.is-loading:focus::after,.button.is-dark.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-dark.is-outlined[disabled],fieldset[disabled] .button.is-dark.is-outlined{background-color:transparent;border-color:#363636;box-shadow:none;color:#363636}.button.is-dark.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-dark.is-inverted.is-outlined.is-focused,.button.is-dark.is-inverted.is-outlined.is-hovered,.button.is-dark.is-inverted.is-outlined:focus,.button.is-dark.is-inverted.is-outlined:hover{background-color:#fff;color:#363636}.button.is-dark.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-dark.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-dark.is-inverted.is-outlined.is-loading:focus::after,.button.is-dark.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #363636 #363636!important}.button.is-dark.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-dark.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-primary{background-color:#00d1b2;border-color:transparent;color:#fff}.button.is-primary.is-hovered,.button.is-primary:hover{background-color:#00c4a7;border-color:transparent;color:#fff}.button.is-primary.is-focused,.button.is-primary:focus{border-color:transparent;color:#fff}.button.is-primary.is-focused:not(:active),.button.is-primary:focus:not(:active){box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.button.is-primary.is-active,.button.is-primary:active{background-color:#00b89c;border-color:transparent;color:#fff}.button.is-primary[disabled],fieldset[disabled] .button.is-primary{background-color:#00d1b2;border-color:transparent;box-shadow:none}.button.is-primary.is-inverted{background-color:#fff;color:#00d1b2}.button.is-primary.is-inverted.is-hovered,.button.is-primary.is-inverted:hover{background-color:#f2f2f2}.button.is-primary.is-inverted[disabled],fieldset[disabled] .button.is-primary.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#00d1b2}.button.is-primary.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-primary.is-outlined{background-color:transparent;border-color:#00d1b2;color:#00d1b2}.button.is-primary.is-outlined.is-focused,.button.is-primary.is-outlined.is-hovered,.button.is-primary.is-outlined:focus,.button.is-primary.is-outlined:hover{background-color:#00d1b2;border-color:#00d1b2;color:#fff}.button.is-primary.is-outlined.is-loading::after{border-color:transparent transparent #00d1b2 #00d1b2!important}.button.is-primary.is-outlined.is-loading.is-focused::after,.button.is-primary.is-outlined.is-loading.is-hovered::after,.button.is-primary.is-outlined.is-loading:focus::after,.button.is-primary.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-primary.is-outlined[disabled],fieldset[disabled] 
.button.is-primary.is-outlined{background-color:transparent;border-color:#00d1b2;box-shadow:none;color:#00d1b2}.button.is-primary.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-primary.is-inverted.is-outlined.is-focused,.button.is-primary.is-inverted.is-outlined.is-hovered,.button.is-primary.is-inverted.is-outlined:focus,.button.is-primary.is-inverted.is-outlined:hover{background-color:#fff;color:#00d1b2}.button.is-primary.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-primary.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-primary.is-inverted.is-outlined.is-loading:focus::after,.button.is-primary.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #00d1b2 #00d1b2!important}.button.is-primary.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-primary.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-primary.is-light{background-color:#ebfffc;color:#00947e}.button.is-primary.is-light.is-hovered,.button.is-primary.is-light:hover{background-color:#defffa;border-color:transparent;color:#00947e}.button.is-primary.is-light.is-active,.button.is-primary.is-light:active{background-color:#d1fff8;border-color:transparent;color:#00947e}.button.is-link{background-color:#3273dc;border-color:transparent;color:#fff}.button.is-link.is-hovered,.button.is-link:hover{background-color:#276cda;border-color:transparent;color:#fff}.button.is-link.is-focused,.button.is-link:focus{border-color:transparent;color:#fff}.button.is-link.is-focused:not(:active),.button.is-link:focus:not(:active){box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.button.is-link.is-active,.button.is-link:active{background-color:#2366d1;border-color:transparent;color:#fff}.button.is-link[disabled],fieldset[disabled] 
.button.is-link{background-color:#3273dc;border-color:transparent;box-shadow:none}.button.is-link.is-inverted{background-color:#fff;color:#3273dc}.button.is-link.is-inverted.is-hovered,.button.is-link.is-inverted:hover{background-color:#f2f2f2}.button.is-link.is-inverted[disabled],fieldset[disabled] .button.is-link.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#3273dc}.button.is-link.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-link.is-outlined{background-color:transparent;border-color:#3273dc;color:#3273dc}.button.is-link.is-outlined.is-focused,.button.is-link.is-outlined.is-hovered,.button.is-link.is-outlined:focus,.button.is-link.is-outlined:hover{background-color:#3273dc;border-color:#3273dc;color:#fff}.button.is-link.is-outlined.is-loading::after{border-color:transparent transparent #3273dc #3273dc!important}.button.is-link.is-outlined.is-loading.is-focused::after,.button.is-link.is-outlined.is-loading.is-hovered::after,.button.is-link.is-outlined.is-loading:focus::after,.button.is-link.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-link.is-outlined[disabled],fieldset[disabled] .button.is-link.is-outlined{background-color:transparent;border-color:#3273dc;box-shadow:none;color:#3273dc}.button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-link.is-inverted.is-outlined.is-focused,.button.is-link.is-inverted.is-outlined.is-hovered,.button.is-link.is-inverted.is-outlined:focus,.button.is-link.is-inverted.is-outlined:hover{background-color:#fff;color:#3273dc}.button.is-link.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-link.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-link.is-inverted.is-outlined.is-loading:focus::after,.button.is-link.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #3273dc 
#3273dc!important}.button.is-link.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-link.is-light{background-color:#eef3fc;color:#2160c4}.button.is-link.is-light.is-hovered,.button.is-link.is-light:hover{background-color:#e3ecfa;border-color:transparent;color:#2160c4}.button.is-link.is-light.is-active,.button.is-link.is-light:active{background-color:#d8e4f8;border-color:transparent;color:#2160c4}.button.is-info{background-color:#3298dc;border-color:transparent;color:#fff}.button.is-info.is-hovered,.button.is-info:hover{background-color:#2793da;border-color:transparent;color:#fff}.button.is-info.is-focused,.button.is-info:focus{border-color:transparent;color:#fff}.button.is-info.is-focused:not(:active),.button.is-info:focus:not(:active){box-shadow:0 0 0 .125em rgba(50,152,220,.25)}.button.is-info.is-active,.button.is-info:active{background-color:#238cd1;border-color:transparent;color:#fff}.button.is-info[disabled],fieldset[disabled] .button.is-info{background-color:#3298dc;border-color:transparent;box-shadow:none}.button.is-info.is-inverted{background-color:#fff;color:#3298dc}.button.is-info.is-inverted.is-hovered,.button.is-info.is-inverted:hover{background-color:#f2f2f2}.button.is-info.is-inverted[disabled],fieldset[disabled] .button.is-info.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#3298dc}.button.is-info.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-info.is-outlined{background-color:transparent;border-color:#3298dc;color:#3298dc}.button.is-info.is-outlined.is-focused,.button.is-info.is-outlined.is-hovered,.button.is-info.is-outlined:focus,.button.is-info.is-outlined:hover{background-color:#3298dc;border-color:#3298dc;color:#fff}.button.is-info.is-outlined.is-loading::after{border-color:transparent transparent #3298dc 
#3298dc!important}.button.is-info.is-outlined.is-loading.is-focused::after,.button.is-info.is-outlined.is-loading.is-hovered::after,.button.is-info.is-outlined.is-loading:focus::after,.button.is-info.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-info.is-outlined[disabled],fieldset[disabled] .button.is-info.is-outlined{background-color:transparent;border-color:#3298dc;box-shadow:none;color:#3298dc}.button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-info.is-inverted.is-outlined.is-focused,.button.is-info.is-inverted.is-outlined.is-hovered,.button.is-info.is-inverted.is-outlined:focus,.button.is-info.is-inverted.is-outlined:hover{background-color:#fff;color:#3298dc}.button.is-info.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-info.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-info.is-inverted.is-outlined.is-loading:focus::after,.button.is-info.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #3298dc #3298dc!important}.button.is-info.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-info.is-light{background-color:#eef6fc;color:#1d72aa}.button.is-info.is-light.is-hovered,.button.is-info.is-light:hover{background-color:#e3f1fa;border-color:transparent;color:#1d72aa}.button.is-info.is-light.is-active,.button.is-info.is-light:active{background-color:#d8ebf8;border-color:transparent;color:#1d72aa}.button.is-success{background-color:#48c774;border-color:transparent;color:#fff}.button.is-success.is-hovered,.button.is-success:hover{background-color:#3ec46d;border-color:transparent;color:#fff}.button.is-success.is-focused,.button.is-success:focus{border-color:transparent;color:#fff}.button.is-success.is-focused:not(:active),.button.is-success:focus:not(:active){box-shadow:0 0 0 .125em rgba(72,199,116,.25)}.button.is-success.is-active,.button.is-success:active{background-color:#3abb67;border-color:transparent;color:#fff}.button.is-success[disabled],fieldset[disabled] .button.is-success{background-color:#48c774;border-color:transparent;box-shadow:none}.button.is-success.is-inverted{background-color:#fff;color:#48c774}.button.is-success.is-inverted.is-hovered,.button.is-success.is-inverted:hover{background-color:#f2f2f2}.button.is-success.is-inverted[disabled],fieldset[disabled] .button.is-success.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#48c774}.button.is-success.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-success.is-outlined{background-color:transparent;border-color:#48c774;color:#48c774}.button.is-success.is-outlined.is-focused,.button.is-success.is-outlined.is-hovered,.button.is-success.is-outlined:focus,.button.is-success.is-outlined:hover{background-color:#48c774;border-color:#48c774;color:#fff}.button.is-success.is-outlined.is-loading::after{border-color:transparent transparent #48c774 
#48c774!important}.button.is-success.is-outlined.is-loading.is-focused::after,.button.is-success.is-outlined.is-loading.is-hovered::after,.button.is-success.is-outlined.is-loading:focus::after,.button.is-success.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-success.is-outlined[disabled],fieldset[disabled] .button.is-success.is-outlined{background-color:transparent;border-color:#48c774;box-shadow:none;color:#48c774}.button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-success.is-inverted.is-outlined.is-focused,.button.is-success.is-inverted.is-outlined.is-hovered,.button.is-success.is-inverted.is-outlined:focus,.button.is-success.is-inverted.is-outlined:hover{background-color:#fff;color:#48c774}.button.is-success.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-success.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-success.is-inverted.is-outlined.is-loading:focus::after,.button.is-success.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #48c774 #48c774!important}.button.is-success.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-success.is-light{background-color:#effaf3;color:#257942}.button.is-success.is-light.is-hovered,.button.is-success.is-light:hover{background-color:#e6f7ec;border-color:transparent;color:#257942}.button.is-success.is-light.is-active,.button.is-success.is-light:active{background-color:#dcf4e4;border-color:transparent;color:#257942}.button.is-warning{background-color:#ffdd57;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-hovered,.button.is-warning:hover{background-color:#ffdb4a;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-focused,.button.is-warning:focus{border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-focused:not(:active),.button.is-warning:focus:not(:active){box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.button.is-warning.is-active,.button.is-warning:active{background-color:#ffd83d;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning[disabled],fieldset[disabled] .button.is-warning{background-color:#ffdd57;border-color:transparent;box-shadow:none}.button.is-warning.is-inverted{background-color:rgba(0,0,0,.7);color:#ffdd57}.button.is-warning.is-inverted.is-hovered,.button.is-warning.is-inverted:hover{background-color:rgba(0,0,0,.7)}.button.is-warning.is-inverted[disabled],fieldset[disabled] .button.is-warning.is-inverted{background-color:rgba(0,0,0,.7);border-color:transparent;box-shadow:none;color:#ffdd57}.button.is-warning.is-loading::after{border-color:transparent transparent rgba(0,0,0,.7) 
rgba(0,0,0,.7)!important}.button.is-warning.is-outlined{background-color:transparent;border-color:#ffdd57;color:#ffdd57}.button.is-warning.is-outlined.is-focused,.button.is-warning.is-outlined.is-hovered,.button.is-warning.is-outlined:focus,.button.is-warning.is-outlined:hover{background-color:#ffdd57;border-color:#ffdd57;color:rgba(0,0,0,.7)}.button.is-warning.is-outlined.is-loading::after{border-color:transparent transparent #ffdd57 #ffdd57!important}.button.is-warning.is-outlined.is-loading.is-focused::after,.button.is-warning.is-outlined.is-loading.is-hovered::after,.button.is-warning.is-outlined.is-loading:focus::after,.button.is-warning.is-outlined.is-loading:hover::after{border-color:transparent transparent rgba(0,0,0,.7) rgba(0,0,0,.7)!important}.button.is-warning.is-outlined[disabled],fieldset[disabled] .button.is-warning.is-outlined{background-color:transparent;border-color:#ffdd57;box-shadow:none;color:#ffdd57}.button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,.7);color:rgba(0,0,0,.7)}.button.is-warning.is-inverted.is-outlined.is-focused,.button.is-warning.is-inverted.is-outlined.is-hovered,.button.is-warning.is-inverted.is-outlined:focus,.button.is-warning.is-inverted.is-outlined:hover{background-color:rgba(0,0,0,.7);color:#ffdd57}.button.is-warning.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-warning.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-warning.is-inverted.is-outlined.is-loading:focus::after,.button.is-warning.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #ffdd57 #ffdd57!important}.button.is-warning.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,.7);box-shadow:none;color:rgba(0,0,0,.7)}.button.is-warning.is-light{background-color:#fffbeb;color:#947600}.button.is-warning.is-light.is-hovered,.button.is-warning.is-light:hover{background-color:#fff8de;border-color:transparent;color:#947600}.button.is-warning.is-light.is-active,.button.is-warning.is-light:active{background-color:#fff6d1;border-color:transparent;color:#947600}.button.is-danger{background-color:#f14668;border-color:transparent;color:#fff}.button.is-danger.is-hovered,.button.is-danger:hover{background-color:#f03a5f;border-color:transparent;color:#fff}.button.is-danger.is-focused,.button.is-danger:focus{border-color:transparent;color:#fff}.button.is-danger.is-focused:not(:active),.button.is-danger:focus:not(:active){box-shadow:0 0 0 .125em rgba(241,70,104,.25)}.button.is-danger.is-active,.button.is-danger:active{background-color:#ef2e55;border-color:transparent;color:#fff}.button.is-danger[disabled],fieldset[disabled] .button.is-danger{background-color:#f14668;border-color:transparent;box-shadow:none}.button.is-danger.is-inverted{background-color:#fff;color:#f14668}.button.is-danger.is-inverted.is-hovered,.button.is-danger.is-inverted:hover{background-color:#f2f2f2}.button.is-danger.is-inverted[disabled],fieldset[disabled] .button.is-danger.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#f14668}.button.is-danger.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-danger.is-outlined{background-color:transparent;border-color:#f14668;color:#f14668}.button.is-danger.is-outlined.is-focused,.button.is-danger.is-outlined.is-hovered,.button.is-danger.is-outlined:focus,.button.is-danger.is-outlined:hover{background-color:#f14668;border-color:#f14668;color:#fff}.button.is-danger.is-outlined.is-loading::after{border-color:transparent transparent #f14668 
#f14668!important}.button.is-danger.is-outlined.is-loading.is-focused::after,.button.is-danger.is-outlined.is-loading.is-hovered::after,.button.is-danger.is-outlined.is-loading:focus::after,.button.is-danger.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-danger.is-outlined[disabled],fieldset[disabled] .button.is-danger.is-outlined{background-color:transparent;border-color:#f14668;box-shadow:none;color:#f14668}.button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-danger.is-inverted.is-outlined.is-focused,.button.is-danger.is-inverted.is-outlined.is-hovered,.button.is-danger.is-inverted.is-outlined:focus,.button.is-danger.is-inverted.is-outlined:hover{background-color:#fff;color:#f14668}.button.is-danger.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-danger.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-danger.is-inverted.is-outlined.is-loading:focus::after,.button.is-danger.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #f14668 #f14668!important}.button.is-danger.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-danger.is-light{background-color:#feecf0;color:#cc0f35}.button.is-danger.is-light.is-hovered,.button.is-danger.is-light:hover{background-color:#fde0e6;border-color:transparent;color:#cc0f35}.button.is-danger.is-light.is-active,.button.is-danger.is-light:active{background-color:#fcd4dc;border-color:transparent;color:#cc0f35}.button.is-small{border-radius:2px;font-size:.75rem}.button.is-normal{font-size:1rem}.button.is-medium{font-size:1.25rem}.button.is-large{font-size:1.5rem}.button[disabled],fieldset[disabled] 
.button{background-color:#fff;border-color:#dbdbdb;box-shadow:none;opacity:.5}.button.is-fullwidth{display:flex;width:100%}.button.is-loading{color:transparent!important;pointer-events:none}.button.is-loading::after{position:absolute;left:calc(50% - (1em / 2));top:calc(50% - (1em / 2));position:absolute!important}.button.is-static{background-color:#f5f5f5;border-color:#dbdbdb;color:#7a7a7a;box-shadow:none;pointer-events:none}.button.is-rounded{border-radius:290486px;padding-left:calc(1em + .25em);padding-right:calc(1em + .25em)}.buttons{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}.buttons .button{margin-bottom:.5rem}.buttons .button:not(:last-child):not(.is-fullwidth){margin-right:.5rem}.buttons:last-child{margin-bottom:-.5rem}.buttons:not(:last-child){margin-bottom:1rem}.buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large){border-radius:2px;font-size:.75rem}.buttons.are-medium .button:not(.is-small):not(.is-normal):not(.is-large){font-size:1.25rem}.buttons.are-large .button:not(.is-small):not(.is-normal):not(.is-medium){font-size:1.5rem}.buttons.has-addons .button:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.buttons.has-addons .button:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0;margin-right:-1px}.buttons.has-addons .button:last-child{margin-right:0}.buttons.has-addons .button.is-hovered,.buttons.has-addons .button:hover{z-index:2}.buttons.has-addons .button.is-active,.buttons.has-addons .button.is-focused,.buttons.has-addons .button.is-selected,.buttons.has-addons .button:active,.buttons.has-addons .button:focus{z-index:3}.buttons.has-addons .button.is-active:hover,.buttons.has-addons .button.is-focused:hover,.buttons.has-addons .button.is-selected:hover,.buttons.has-addons .button:active:hover,.buttons.has-addons .button:focus:hover{z-index:4}.buttons.has-addons 
.button.is-expanded{flex-grow:1;flex-shrink:1}.buttons.is-centered{justify-content:center}.buttons.is-centered:not(.has-addons) .button:not(.is-fullwidth){margin-left:.25rem;margin-right:.25rem}.buttons.is-right{justify-content:flex-end}.buttons.is-right:not(.has-addons) .button:not(.is-fullwidth){margin-left:.25rem;margin-right:.25rem}.container{flex-grow:1;margin:0 auto;position:relative;width:auto}.container.is-fluid{max-width:none!important;padding-left:32px;padding-right:32px;width:100%}@media screen and (min-width:1024px){.container{max-width:960px}}@media screen and (max-width:1215px){.container.is-widescreen:not(.is-max-desktop){max-width:1152px}}@media screen and (max-width:1407px){.container.is-fullhd:not(.is-max-desktop):not(.is-max-widescreen){max-width:1344px}}@media screen and (min-width:1216px){.container:not(.is-max-desktop){max-width:1152px}}@media screen and (min-width:1408px){.container:not(.is-max-desktop):not(.is-max-widescreen){max-width:1344px}}.content li+li{margin-top:.25em}.content blockquote:not(:last-child),.content dl:not(:last-child),.content ol:not(:last-child),.content p:not(:last-child),.content pre:not(:last-child),.content table:not(:last-child),.content ul:not(:last-child){margin-bottom:1em}.content h1,.content h2,.content h3,.content h4,.content h5,.content h6{color:#363636;font-weight:600;line-height:1.125}.content h1{font-size:2em;margin-bottom:.5em}.content h1:not(:first-child){margin-top:1em}.content h2{font-size:1.75em;margin-bottom:.5714em}.content h2:not(:first-child){margin-top:1.1428em}.content h3{font-size:1.5em;margin-bottom:.6666em}.content h3:not(:first-child){margin-top:1.3333em}.content h4{font-size:1.25em;margin-bottom:.8em}.content h5{font-size:1.125em;margin-bottom:.8888em}.content h6{font-size:1em;margin-bottom:1em}.content blockquote{background-color:#f5f5f5;border-left:5px solid #dbdbdb;padding:1.25em 1.5em}.content ol{list-style-position:outside;margin-left:2em;margin-top:1em}.content 
ol:not([type]){list-style-type:decimal}.content ol:not([type]).is-lower-alpha{list-style-type:lower-alpha}.content ol:not([type]).is-lower-roman{list-style-type:lower-roman}.content ol:not([type]).is-upper-alpha{list-style-type:upper-alpha}.content ol:not([type]).is-upper-roman{list-style-type:upper-roman}.content ul{list-style:disc outside;margin-left:2em;margin-top:1em}.content ul ul{list-style-type:circle;margin-top:.5em}.content ul ul ul{list-style-type:square}.content dd{margin-left:2em}.content figure{margin-left:2em;margin-right:2em;text-align:center}.content figure:not(:first-child){margin-top:2em}.content figure:not(:last-child){margin-bottom:2em}.content figure img{display:inline-block}.content figure figcaption{font-style:italic}.content pre{-webkit-overflow-scrolling:touch;overflow-x:auto;padding:1.25em 1.5em;white-space:pre;word-wrap:normal}.content sub,.content sup{font-size:75%}.content table{width:100%}.content table td,.content table th{border:1px solid #dbdbdb;border-width:0 0 1px;padding:.5em .75em;vertical-align:top}.content table th{color:#363636}.content table th:not([align]){text-align:inherit}.content table thead td,.content table thead th{border-width:0 0 2px;color:#363636}.content table tfoot td,.content table tfoot th{border-width:2px 0 0;color:#363636}.content table tbody tr:last-child td,.content table tbody tr:last-child th{border-bottom-width:0}.content .tabs li+li{margin-top:0}.content.is-small{font-size:.75rem}.content.is-medium{font-size:1.25rem}.content.is-large{font-size:1.5rem}.icon{align-items:center;display:inline-flex;justify-content:center;height:1.5rem;width:1.5rem}.icon.is-small{height:1rem;width:1rem}.icon.is-medium{height:2rem;width:2rem}.icon.is-large{height:3rem;width:3rem}.image{display:block;position:relative}.image img{display:block;height:auto;width:100%}.image img.is-rounded{border-radius:290486px}.image.is-fullwidth{width:100%}.image.is-16by9 .has-ratio,.image.is-16by9 img,.image.is-1by1 .has-ratio,.image.is-1by1 
img,.image.is-1by2 .has-ratio,.image.is-1by2 img,.image.is-1by3 .has-ratio,.image.is-1by3 img,.image.is-2by1 .has-ratio,.image.is-2by1 img,.image.is-2by3 .has-ratio,.image.is-2by3 img,.image.is-3by1 .has-ratio,.image.is-3by1 img,.image.is-3by2 .has-ratio,.image.is-3by2 img,.image.is-3by4 .has-ratio,.image.is-3by4 img,.image.is-3by5 .has-ratio,.image.is-3by5 img,.image.is-4by3 .has-ratio,.image.is-4by3 img,.image.is-4by5 .has-ratio,.image.is-4by5 img,.image.is-5by3 .has-ratio,.image.is-5by3 img,.image.is-5by4 .has-ratio,.image.is-5by4 img,.image.is-9by16 .has-ratio,.image.is-9by16 img,.image.is-square .has-ratio,.image.is-square img{height:100%;width:100%}.image.is-1by1,.image.is-square{padding-top:100%}.image.is-5by4{padding-top:80%}.image.is-4by3{padding-top:75%}.image.is-3by2{padding-top:66.6666%}.image.is-5by3{padding-top:60%}.image.is-16by9{padding-top:56.25%}.image.is-2by1{padding-top:50%}.image.is-3by1{padding-top:33.3333%}.image.is-4by5{padding-top:125%}.image.is-3by4{padding-top:133.3333%}.image.is-2by3{padding-top:150%}.image.is-3by5{padding-top:166.6666%}.image.is-9by16{padding-top:177.7777%}.image.is-1by2{padding-top:200%}.image.is-1by3{padding-top:300%}.image.is-16x16{height:16px;width:16px}.image.is-24x24{height:24px;width:24px}.image.is-32x32{height:32px;width:32px}.image.is-48x48{height:48px;width:48px}.image.is-64x64{height:64px;width:64px}.image.is-96x96{height:96px;width:96px}.image.is-128x128{height:128px;width:128px}.notification{background-color:#f5f5f5;border-radius:4px;position:relative;padding:1.25rem 2.5rem 1.25rem 1.5rem}.notification a:not(.button):not(.dropdown-item){color:currentColor;text-decoration:underline}.notification strong{color:currentColor}.notification code,.notification pre{background:#fff}.notification pre code{background:0 0}.notification>.delete{right:.5rem;position:absolute;top:.5rem}.notification .content,.notification .subtitle,.notification 
.title{color:currentColor}.notification.is-white{background-color:#fff;color:#0a0a0a}.notification.is-black{background-color:#0a0a0a;color:#fff}.notification.is-light{background-color:#f5f5f5;color:rgba(0,0,0,.7)}.notification.is-dark{background-color:#363636;color:#fff}.notification.is-primary{background-color:#00d1b2;color:#fff}.notification.is-primary.is-light{background-color:#ebfffc;color:#00947e}.notification.is-link{background-color:#3273dc;color:#fff}.notification.is-link.is-light{background-color:#eef3fc;color:#2160c4}.notification.is-info{background-color:#3298dc;color:#fff}.notification.is-info.is-light{background-color:#eef6fc;color:#1d72aa}.notification.is-success{background-color:#48c774;color:#fff}.notification.is-success.is-light{background-color:#effaf3;color:#257942}.notification.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.notification.is-warning.is-light{background-color:#fffbeb;color:#947600}.notification.is-danger{background-color:#f14668;color:#fff}.notification.is-danger.is-light{background-color:#feecf0;color:#cc0f35}.progress{-moz-appearance:none;-webkit-appearance:none;border:none;border-radius:290486px;display:block;height:1rem;overflow:hidden;padding:0;width:100%}.progress::-webkit-progress-bar{background-color:#ededed}.progress::-webkit-progress-value{background-color:#4a4a4a}.progress::-moz-progress-bar{background-color:#4a4a4a}.progress::-ms-fill{background-color:#4a4a4a;border:none}.progress.is-white::-webkit-progress-value{background-color:#fff}.progress.is-white::-moz-progress-bar{background-color:#fff}.progress.is-white::-ms-fill{background-color:#fff}.progress.is-white:indeterminate{background-image:linear-gradient(to right,#fff 30%,#ededed 30%)}.progress.is-black::-webkit-progress-value{background-color:#0a0a0a}.progress.is-black::-moz-progress-bar{background-color:#0a0a0a}.progress.is-black::-ms-fill{background-color:#0a0a0a}.progress.is-black:indeterminate{background-image:linear-gradient(to right,#0a0a0a 
30%,#ededed 30%)}.progress.is-light::-webkit-progress-value{background-color:#f5f5f5}.progress.is-light::-moz-progress-bar{background-color:#f5f5f5}.progress.is-light::-ms-fill{background-color:#f5f5f5}.progress.is-light:indeterminate{background-image:linear-gradient(to right,#f5f5f5 30%,#ededed 30%)}.progress.is-dark::-webkit-progress-value{background-color:#363636}.progress.is-dark::-moz-progress-bar{background-color:#363636}.progress.is-dark::-ms-fill{background-color:#363636}.progress.is-dark:indeterminate{background-image:linear-gradient(to right,#363636 30%,#ededed 30%)}.progress.is-primary::-webkit-progress-value{background-color:#00d1b2}.progress.is-primary::-moz-progress-bar{background-color:#00d1b2}.progress.is-primary::-ms-fill{background-color:#00d1b2}.progress.is-primary:indeterminate{background-image:linear-gradient(to right,#00d1b2 30%,#ededed 30%)}.progress.is-link::-webkit-progress-value{background-color:#3273dc}.progress.is-link::-moz-progress-bar{background-color:#3273dc}.progress.is-link::-ms-fill{background-color:#3273dc}.progress.is-link:indeterminate{background-image:linear-gradient(to right,#3273dc 30%,#ededed 30%)}.progress.is-info::-webkit-progress-value{background-color:#3298dc}.progress.is-info::-moz-progress-bar{background-color:#3298dc}.progress.is-info::-ms-fill{background-color:#3298dc}.progress.is-info:indeterminate{background-image:linear-gradient(to right,#3298dc 30%,#ededed 30%)}.progress.is-success::-webkit-progress-value{background-color:#48c774}.progress.is-success::-moz-progress-bar{background-color:#48c774}.progress.is-success::-ms-fill{background-color:#48c774}.progress.is-success:indeterminate{background-image:linear-gradient(to right,#48c774 30%,#ededed 30%)}.progress.is-warning::-webkit-progress-value{background-color:#ffdd57}.progress.is-warning::-moz-progress-bar{background-color:#ffdd57}.progress.is-warning::-ms-fill{background-color:#ffdd57}.progress.is-warning:indeterminate{background-image:linear-gradient(to 
right,#ffdd57 30%,#ededed 30%)}.progress.is-danger::-webkit-progress-value{background-color:#f14668}.progress.is-danger::-moz-progress-bar{background-color:#f14668}.progress.is-danger::-ms-fill{background-color:#f14668}.progress.is-danger:indeterminate{background-image:linear-gradient(to right,#f14668 30%,#ededed 30%)}.progress:indeterminate{-webkit-animation-duration:1.5s;animation-duration:1.5s;-webkit-animation-iteration-count:infinite;animation-iteration-count:infinite;-webkit-animation-name:moveIndeterminate;animation-name:moveIndeterminate;-webkit-animation-timing-function:linear;animation-timing-function:linear;background-color:#ededed;background-image:linear-gradient(to right,#4a4a4a 30%,#ededed 30%);background-position:top left;background-repeat:no-repeat;background-size:150% 150%}.progress:indeterminate::-webkit-progress-bar{background-color:transparent}.progress:indeterminate::-moz-progress-bar{background-color:transparent}.progress:indeterminate::-ms-fill{animation-name:none}.progress.is-small{height:.75rem}.progress.is-medium{height:1.25rem}.progress.is-large{height:1.5rem}@-webkit-keyframes moveIndeterminate{from{background-position:200% 0}to{background-position:-200% 0}}@keyframes moveIndeterminate{from{background-position:200% 0}to{background-position:-200% 0}}.table{background-color:#fff;color:#363636}.table td,.table th{border:1px solid #dbdbdb;border-width:0 0 1px;padding:.5em .75em;vertical-align:top}.table td.is-white,.table th.is-white{background-color:#fff;border-color:#fff;color:#0a0a0a}.table td.is-black,.table th.is-black{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.table td.is-light,.table th.is-light{background-color:#f5f5f5;border-color:#f5f5f5;color:rgba(0,0,0,.7)}.table td.is-dark,.table th.is-dark{background-color:#363636;border-color:#363636;color:#fff}.table td.is-primary,.table th.is-primary{background-color:#00d1b2;border-color:#00d1b2;color:#fff}.table td.is-link,.table 
th.is-link{background-color:#3273dc;border-color:#3273dc;color:#fff}.table td.is-info,.table th.is-info{background-color:#3298dc;border-color:#3298dc;color:#fff}.table td.is-success,.table th.is-success{background-color:#48c774;border-color:#48c774;color:#fff}.table td.is-warning,.table th.is-warning{background-color:#ffdd57;border-color:#ffdd57;color:rgba(0,0,0,.7)}.table td.is-danger,.table th.is-danger{background-color:#f14668;border-color:#f14668;color:#fff}.table td.is-narrow,.table th.is-narrow{white-space:nowrap;width:1%}.table td.is-selected,.table th.is-selected{background-color:#00d1b2;color:#fff}.table td.is-selected a,.table td.is-selected strong,.table th.is-selected a,.table th.is-selected strong{color:currentColor}.table td.is-vcentered,.table th.is-vcentered{vertical-align:middle}.table th{color:#363636}.table th:not([align]){text-align:inherit}.table tr.is-selected{background-color:#00d1b2;color:#fff}.table tr.is-selected a,.table tr.is-selected strong{color:currentColor}.table tr.is-selected td,.table tr.is-selected th{border-color:#fff;color:currentColor}.table thead{background-color:transparent}.table thead td,.table thead th{border-width:0 0 2px;color:#363636}.table tfoot{background-color:transparent}.table tfoot td,.table tfoot th{border-width:2px 0 0;color:#363636}.table tbody{background-color:transparent}.table tbody tr:last-child td,.table tbody tr:last-child th{border-bottom-width:0}.table.is-bordered td,.table.is-bordered th{border-width:1px}.table.is-bordered tr:last-child td,.table.is-bordered tr:last-child th{border-bottom-width:1px}.table.is-fullwidth{width:100%}.table.is-hoverable tbody tr:not(.is-selected):hover{background-color:#fafafa}.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover{background-color:#fafafa}.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover:nth-child(even){background-color:#f5f5f5}.table.is-narrow td,.table.is-narrow th{padding:.25em .5em}.table.is-striped tbody 
tr:not(.is-selected):nth-child(even){background-color:#fafafa}.table-container{-webkit-overflow-scrolling:touch;overflow:auto;overflow-y:hidden;max-width:100%}.tags{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}.tags .tag{margin-bottom:.5rem}.tags .tag:not(:last-child){margin-right:.5rem}.tags:last-child{margin-bottom:-.5rem}.tags:not(:last-child){margin-bottom:1rem}.tags.are-medium .tag:not(.is-normal):not(.is-large){font-size:1rem}.tags.are-large .tag:not(.is-normal):not(.is-medium){font-size:1.25rem}.tags.is-centered{justify-content:center}.tags.is-centered .tag{margin-right:.25rem;margin-left:.25rem}.tags.is-right{justify-content:flex-end}.tags.is-right .tag:not(:first-child){margin-left:.5rem}.tags.is-right .tag:not(:last-child){margin-right:0}.tags.has-addons .tag{margin-right:0}.tags.has-addons .tag:not(:first-child){margin-left:0;border-top-left-radius:0;border-bottom-left-radius:0}.tags.has-addons .tag:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.tag:not(body){align-items:center;background-color:#f5f5f5;border-radius:4px;color:#4a4a4a;display:inline-flex;font-size:.75rem;height:2em;justify-content:center;line-height:1.5;padding-left:.75em;padding-right:.75em;white-space:nowrap}.tag:not(body) 
.delete{margin-left:.25rem;margin-right:-.375rem}.tag:not(body).is-white{background-color:#fff;color:#0a0a0a}.tag:not(body).is-black{background-color:#0a0a0a;color:#fff}.tag:not(body).is-light{background-color:#f5f5f5;color:rgba(0,0,0,.7)}.tag:not(body).is-dark{background-color:#363636;color:#fff}.tag:not(body).is-primary{background-color:#00d1b2;color:#fff}.tag:not(body).is-primary.is-light{background-color:#ebfffc;color:#00947e}.tag:not(body).is-link{background-color:#3273dc;color:#fff}.tag:not(body).is-link.is-light{background-color:#eef3fc;color:#2160c4}.tag:not(body).is-info{background-color:#3298dc;color:#fff}.tag:not(body).is-info.is-light{background-color:#eef6fc;color:#1d72aa}.tag:not(body).is-success{background-color:#48c774;color:#fff}.tag:not(body).is-success.is-light{background-color:#effaf3;color:#257942}.tag:not(body).is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.tag:not(body).is-warning.is-light{background-color:#fffbeb;color:#947600}.tag:not(body).is-danger{background-color:#f14668;color:#fff}.tag:not(body).is-danger.is-light{background-color:#feecf0;color:#cc0f35}.tag:not(body).is-normal{font-size:.75rem}.tag:not(body).is-medium{font-size:1rem}.tag:not(body).is-large{font-size:1.25rem}.tag:not(body) .icon:first-child:not(:last-child){margin-left:-.375em;margin-right:.1875em}.tag:not(body) .icon:last-child:not(:first-child){margin-left:.1875em;margin-right:-.375em}.tag:not(body) .icon:first-child:last-child{margin-left:-.375em;margin-right:-.375em}.tag:not(body).is-delete{margin-left:1px;padding:0;position:relative;width:2em}.tag:not(body).is-delete::after,.tag:not(body).is-delete::before{background-color:currentColor;content:"";display:block;left:50%;position:absolute;top:50%;transform:translateX(-50%) translateY(-50%) rotate(45deg);transform-origin:center 
center}.tag:not(body).is-delete::before{height:1px;width:50%}.tag:not(body).is-delete::after{height:50%;width:1px}.tag:not(body).is-delete:focus,.tag:not(body).is-delete:hover{background-color:#e8e8e8}.tag:not(body).is-delete:active{background-color:#dbdbdb}.tag:not(body).is-rounded{border-radius:290486px}a.tag:hover{text-decoration:underline}.subtitle,.title{word-break:break-word}.subtitle em,.subtitle span,.title em,.title span{font-weight:inherit}.subtitle sub,.title sub{font-size:.75em}.subtitle sup,.title sup{font-size:.75em}.subtitle .tag,.title .tag{vertical-align:middle}.title{color:#363636;font-size:2rem;font-weight:600;line-height:1.125}.title strong{color:inherit;font-weight:inherit}.title+.highlight{margin-top:-.75rem}.title:not(.is-spaced)+.subtitle{margin-top:-1.25rem}.title.is-1{font-size:3rem}.title.is-2{font-size:2.5rem}.title.is-3{font-size:2rem}.title.is-4{font-size:1.5rem}.title.is-5{font-size:1.25rem}.title.is-6{font-size:1rem}.title.is-7{font-size:.75rem}.subtitle{color:#4a4a4a;font-size:1.25rem;font-weight:400;line-height:1.25}.subtitle strong{color:#363636;font-weight:600}.subtitle:not(.is-spaced)+.title{margin-top:-1.25rem}.subtitle.is-1{font-size:3rem}.subtitle.is-2{font-size:2.5rem}.subtitle.is-3{font-size:2rem}.subtitle.is-4{font-size:1.5rem}.subtitle.is-5{font-size:1.25rem}.subtitle.is-6{font-size:1rem}.subtitle.is-7{font-size:.75rem}.heading{display:block;font-size:11px;letter-spacing:1px;margin-bottom:5px;text-transform:uppercase}.highlight{font-weight:400;max-width:100%;overflow:hidden;padding:0}.highlight pre{overflow:auto;max-width:100%}.number{align-items:center;background-color:#f5f5f5;border-radius:290486px;display:inline-flex;font-size:1.25rem;height:2em;justify-content:center;margin-right:1.5rem;min-width:2.5em;padding:.25rem .5rem;text-align:center;vertical-align:top}.input,.select select,.textarea{background-color:#fff;border-color:#dbdbdb;border-radius:4px;color:#363636}.input::-moz-placeholder,.select 
select::-moz-placeholder,.textarea::-moz-placeholder{color:rgba(54,54,54,.3)}.input::-webkit-input-placeholder,.select select::-webkit-input-placeholder,.textarea::-webkit-input-placeholder{color:rgba(54,54,54,.3)}.input:-moz-placeholder,.select select:-moz-placeholder,.textarea:-moz-placeholder{color:rgba(54,54,54,.3)}.input:-ms-input-placeholder,.select select:-ms-input-placeholder,.textarea:-ms-input-placeholder{color:rgba(54,54,54,.3)}.input:hover,.is-hovered.input,.is-hovered.textarea,.select select.is-hovered,.select select:hover,.textarea:hover{border-color:#b5b5b5}.input:active,.input:focus,.is-active.input,.is-active.textarea,.is-focused.input,.is-focused.textarea,.select select.is-active,.select select.is-focused,.select select:active,.select select:focus,.textarea:active,.textarea:focus{border-color:#3273dc;box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.input[disabled],.select fieldset[disabled] select,.select select[disabled],.textarea[disabled],fieldset[disabled] .input,fieldset[disabled] .select select,fieldset[disabled] .textarea{background-color:#f5f5f5;border-color:#f5f5f5;box-shadow:none;color:#7a7a7a}.input[disabled]::-moz-placeholder,.select fieldset[disabled] select::-moz-placeholder,.select select[disabled]::-moz-placeholder,.textarea[disabled]::-moz-placeholder,fieldset[disabled] .input::-moz-placeholder,fieldset[disabled] .select select::-moz-placeholder,fieldset[disabled] .textarea::-moz-placeholder{color:rgba(122,122,122,.3)}.input[disabled]::-webkit-input-placeholder,.select fieldset[disabled] select::-webkit-input-placeholder,.select select[disabled]::-webkit-input-placeholder,.textarea[disabled]::-webkit-input-placeholder,fieldset[disabled] .input::-webkit-input-placeholder,fieldset[disabled] .select select::-webkit-input-placeholder,fieldset[disabled] .textarea::-webkit-input-placeholder{color:rgba(122,122,122,.3)}.input[disabled]:-moz-placeholder,.select fieldset[disabled] select:-moz-placeholder,.select 
select[disabled]:-moz-placeholder,.textarea[disabled]:-moz-placeholder,fieldset[disabled] .input:-moz-placeholder,fieldset[disabled] .select select:-moz-placeholder,fieldset[disabled] .textarea:-moz-placeholder{color:rgba(122,122,122,.3)}.input[disabled]:-ms-input-placeholder,.select fieldset[disabled] select:-ms-input-placeholder,.select select[disabled]:-ms-input-placeholder,.textarea[disabled]:-ms-input-placeholder,fieldset[disabled] .input:-ms-input-placeholder,fieldset[disabled] .select select:-ms-input-placeholder,fieldset[disabled] .textarea:-ms-input-placeholder{color:rgba(122,122,122,.3)}.input,.textarea{box-shadow:inset 0 .0625em .125em rgba(10,10,10,.05);max-width:100%;width:100%}.input[readonly],.textarea[readonly]{box-shadow:none}.is-white.input,.is-white.textarea{border-color:#fff}.is-white.input:active,.is-white.input:focus,.is-white.is-active.input,.is-white.is-active.textarea,.is-white.is-focused.input,.is-white.is-focused.textarea,.is-white.textarea:active,.is-white.textarea:focus{box-shadow:0 0 0 .125em rgba(255,255,255,.25)}.is-black.input,.is-black.textarea{border-color:#0a0a0a}.is-black.input:active,.is-black.input:focus,.is-black.is-active.input,.is-black.is-active.textarea,.is-black.is-focused.input,.is-black.is-focused.textarea,.is-black.textarea:active,.is-black.textarea:focus{box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.is-light.input,.is-light.textarea{border-color:#f5f5f5}.is-light.input:active,.is-light.input:focus,.is-light.is-active.input,.is-light.is-active.textarea,.is-light.is-focused.input,.is-light.is-focused.textarea,.is-light.textarea:active,.is-light.textarea:focus{box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.is-dark.input,.is-dark.textarea{border-color:#363636}.is-dark.input:active,.is-dark.input:focus,.is-dark.is-active.input,.is-dark.is-active.textarea,.is-dark.is-focused.input,.is-dark.is-focused.textarea,.is-dark.textarea:active,.is-dark.textarea:focus{box-shadow:0 0 0 .125em 
rgba(54,54,54,.25)}.is-primary.input,.is-primary.textarea{border-color:#00d1b2}.is-primary.input:active,.is-primary.input:focus,.is-primary.is-active.input,.is-primary.is-active.textarea,.is-primary.is-focused.input,.is-primary.is-focused.textarea,.is-primary.textarea:active,.is-primary.textarea:focus{box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.is-link.input,.is-link.textarea{border-color:#3273dc}.is-link.input:active,.is-link.input:focus,.is-link.is-active.input,.is-link.is-active.textarea,.is-link.is-focused.input,.is-link.is-focused.textarea,.is-link.textarea:active,.is-link.textarea:focus{box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.is-info.input,.is-info.textarea{border-color:#3298dc}.is-info.input:active,.is-info.input:focus,.is-info.is-active.input,.is-info.is-active.textarea,.is-info.is-focused.input,.is-info.is-focused.textarea,.is-info.textarea:active,.is-info.textarea:focus{box-shadow:0 0 0 .125em rgba(50,152,220,.25)}.is-success.input,.is-success.textarea{border-color:#48c774}.is-success.input:active,.is-success.input:focus,.is-success.is-active.input,.is-success.is-active.textarea,.is-success.is-focused.input,.is-success.is-focused.textarea,.is-success.textarea:active,.is-success.textarea:focus{box-shadow:0 0 0 .125em rgba(72,199,116,.25)}.is-warning.input,.is-warning.textarea{border-color:#ffdd57}.is-warning.input:active,.is-warning.input:focus,.is-warning.is-active.input,.is-warning.is-active.textarea,.is-warning.is-focused.input,.is-warning.is-focused.textarea,.is-warning.textarea:active,.is-warning.textarea:focus{box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.is-danger.input,.is-danger.textarea{border-color:#f14668}.is-danger.input:active,.is-danger.input:focus,.is-danger.is-active.input,.is-danger.is-active.textarea,.is-danger.is-focused.input,.is-danger.is-focused.textarea,.is-danger.textarea:active,.is-danger.textarea:focus{box-shadow:0 0 0 .125em 
rgba(241,70,104,.25)}.is-small.input,.is-small.textarea{border-radius:2px;font-size:.75rem}.is-medium.input,.is-medium.textarea{font-size:1.25rem}.is-large.input,.is-large.textarea{font-size:1.5rem}.is-fullwidth.input,.is-fullwidth.textarea{display:block;width:100%}.is-inline.input,.is-inline.textarea{display:inline;width:auto}.input.is-rounded{border-radius:290486px;padding-left:calc(calc(.75em - 1px) + .375em);padding-right:calc(calc(.75em - 1px) + .375em)}.input.is-static{background-color:transparent;border-color:transparent;box-shadow:none;padding-left:0;padding-right:0}.textarea{display:block;max-width:100%;min-width:100%;padding:calc(.75em - 1px);resize:vertical}.textarea:not([rows]){max-height:40em;min-height:8em}.textarea[rows]{height:initial}.textarea.has-fixed-size{resize:none}.checkbox,.radio{cursor:pointer;display:inline-block;line-height:1.25;position:relative}.checkbox input,.radio input{cursor:pointer}.checkbox:hover,.radio:hover{color:#363636}.checkbox input[disabled],.checkbox[disabled],.radio input[disabled],.radio[disabled],fieldset[disabled] .checkbox,fieldset[disabled] .radio{color:#7a7a7a;cursor:not-allowed}.radio+.radio{margin-left:.5em}.select{display:inline-block;max-width:100%;position:relative;vertical-align:top}.select:not(.is-multiple){height:2.5em}.select:not(.is-multiple):not(.is-loading)::after{border-color:#3273dc;right:1.125em;z-index:4}.select.is-rounded select{border-radius:290486px;padding-left:1em}.select select{cursor:pointer;display:block;font-size:1em;max-width:100%;outline:0}.select select::-ms-expand{display:none}.select select[disabled]:hover,fieldset[disabled] .select select:hover{border-color:#f5f5f5}.select select:not([multiple]){padding-right:2.5em}.select select[multiple]{height:auto;padding:0}.select select[multiple] option{padding:.5em 1em}.select:not(.is-multiple):not(.is-loading):hover::after{border-color:#363636}.select.is-white:not(:hover)::after{border-color:#fff}.select.is-white 
select{border-color:#fff}.select.is-white select.is-hovered,.select.is-white select:hover{border-color:#f2f2f2}.select.is-white select.is-active,.select.is-white select.is-focused,.select.is-white select:active,.select.is-white select:focus{box-shadow:0 0 0 .125em rgba(255,255,255,.25)}.select.is-black:not(:hover)::after{border-color:#0a0a0a}.select.is-black select{border-color:#0a0a0a}.select.is-black select.is-hovered,.select.is-black select:hover{border-color:#000}.select.is-black select.is-active,.select.is-black select.is-focused,.select.is-black select:active,.select.is-black select:focus{box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.select.is-light:not(:hover)::after{border-color:#f5f5f5}.select.is-light select{border-color:#f5f5f5}.select.is-light select.is-hovered,.select.is-light select:hover{border-color:#e8e8e8}.select.is-light select.is-active,.select.is-light select.is-focused,.select.is-light select:active,.select.is-light select:focus{box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.select.is-dark:not(:hover)::after{border-color:#363636}.select.is-dark select{border-color:#363636}.select.is-dark select.is-hovered,.select.is-dark select:hover{border-color:#292929}.select.is-dark select.is-active,.select.is-dark select.is-focused,.select.is-dark select:active,.select.is-dark select:focus{box-shadow:0 0 0 .125em rgba(54,54,54,.25)}.select.is-primary:not(:hover)::after{border-color:#00d1b2}.select.is-primary select{border-color:#00d1b2}.select.is-primary select.is-hovered,.select.is-primary select:hover{border-color:#00b89c}.select.is-primary select.is-active,.select.is-primary select.is-focused,.select.is-primary select:active,.select.is-primary select:focus{box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.select.is-link:not(:hover)::after{border-color:#3273dc}.select.is-link select{border-color:#3273dc}.select.is-link select.is-hovered,.select.is-link select:hover{border-color:#2366d1}.select.is-link select.is-active,.select.is-link 
select.is-focused,.select.is-link select:active,.select.is-link select:focus{box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.select.is-info:not(:hover)::after{border-color:#3298dc}.select.is-info select{border-color:#3298dc}.select.is-info select.is-hovered,.select.is-info select:hover{border-color:#238cd1}.select.is-info select.is-active,.select.is-info select.is-focused,.select.is-info select:active,.select.is-info select:focus{box-shadow:0 0 0 .125em rgba(50,152,220,.25)}.select.is-success:not(:hover)::after{border-color:#48c774}.select.is-success select{border-color:#48c774}.select.is-success select.is-hovered,.select.is-success select:hover{border-color:#3abb67}.select.is-success select.is-active,.select.is-success select.is-focused,.select.is-success select:active,.select.is-success select:focus{box-shadow:0 0 0 .125em rgba(72,199,116,.25)}.select.is-warning:not(:hover)::after{border-color:#ffdd57}.select.is-warning select{border-color:#ffdd57}.select.is-warning select.is-hovered,.select.is-warning select:hover{border-color:#ffd83d}.select.is-warning select.is-active,.select.is-warning select.is-focused,.select.is-warning select:active,.select.is-warning select:focus{box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.select.is-danger:not(:hover)::after{border-color:#f14668}.select.is-danger select{border-color:#f14668}.select.is-danger select.is-hovered,.select.is-danger select:hover{border-color:#ef2e55}.select.is-danger select.is-active,.select.is-danger select.is-focused,.select.is-danger select:active,.select.is-danger select:focus{box-shadow:0 0 0 .125em rgba(241,70,104,.25)}.select.is-small{border-radius:2px;font-size:.75rem}.select.is-medium{font-size:1.25rem}.select.is-large{font-size:1.5rem}.select.is-disabled::after{border-color:#7a7a7a}.select.is-fullwidth{width:100%}.select.is-fullwidth 
select{width:100%}.select.is-loading::after{margin-top:0;position:absolute;right:.625em;top:.625em;transform:none}.select.is-loading.is-small:after{font-size:.75rem}.select.is-loading.is-medium:after{font-size:1.25rem}.select.is-loading.is-large:after{font-size:1.5rem}.file{align-items:stretch;display:flex;justify-content:flex-start;position:relative}.file.is-white .file-cta{background-color:#fff;border-color:transparent;color:#0a0a0a}.file.is-white.is-hovered .file-cta,.file.is-white:hover .file-cta{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}.file.is-white.is-focused .file-cta,.file.is-white:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(255,255,255,.25);color:#0a0a0a}.file.is-white.is-active .file-cta,.file.is-white:active .file-cta{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}.file.is-black .file-cta{background-color:#0a0a0a;border-color:transparent;color:#fff}.file.is-black.is-hovered .file-cta,.file.is-black:hover .file-cta{background-color:#040404;border-color:transparent;color:#fff}.file.is-black.is-focused .file-cta,.file.is-black:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(10,10,10,.25);color:#fff}.file.is-black.is-active .file-cta,.file.is-black:active .file-cta{background-color:#000;border-color:transparent;color:#fff}.file.is-light .file-cta{background-color:#f5f5f5;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-light.is-hovered .file-cta,.file.is-light:hover .file-cta{background-color:#eee;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-light.is-focused .file-cta,.file.is-light:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(245,245,245,.25);color:rgba(0,0,0,.7)}.file.is-light.is-active .file-cta,.file.is-light:active .file-cta{background-color:#e8e8e8;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-dark .file-cta{background-color:#363636;border-color:transparent;color:#fff}.file.is-dark.is-hovered .file-cta,.file.is-dark:hover 
.file-cta{background-color:#2f2f2f;border-color:transparent;color:#fff}.file.is-dark.is-focused .file-cta,.file.is-dark:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(54,54,54,.25);color:#fff}.file.is-dark.is-active .file-cta,.file.is-dark:active .file-cta{background-color:#292929;border-color:transparent;color:#fff}.file.is-primary .file-cta{background-color:#00d1b2;border-color:transparent;color:#fff}.file.is-primary.is-hovered .file-cta,.file.is-primary:hover .file-cta{background-color:#00c4a7;border-color:transparent;color:#fff}.file.is-primary.is-focused .file-cta,.file.is-primary:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(0,209,178,.25);color:#fff}.file.is-primary.is-active .file-cta,.file.is-primary:active .file-cta{background-color:#00b89c;border-color:transparent;color:#fff}.file.is-link .file-cta{background-color:#3273dc;border-color:transparent;color:#fff}.file.is-link.is-hovered .file-cta,.file.is-link:hover .file-cta{background-color:#276cda;border-color:transparent;color:#fff}.file.is-link.is-focused .file-cta,.file.is-link:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(50,115,220,.25);color:#fff}.file.is-link.is-active .file-cta,.file.is-link:active .file-cta{background-color:#2366d1;border-color:transparent;color:#fff}.file.is-info .file-cta{background-color:#3298dc;border-color:transparent;color:#fff}.file.is-info.is-hovered .file-cta,.file.is-info:hover .file-cta{background-color:#2793da;border-color:transparent;color:#fff}.file.is-info.is-focused .file-cta,.file.is-info:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(50,152,220,.25);color:#fff}.file.is-info.is-active .file-cta,.file.is-info:active .file-cta{background-color:#238cd1;border-color:transparent;color:#fff}.file.is-success .file-cta{background-color:#48c774;border-color:transparent;color:#fff}.file.is-success.is-hovered .file-cta,.file.is-success:hover 
.file-cta{background-color:#3ec46d;border-color:transparent;color:#fff}.file.is-success.is-focused .file-cta,.file.is-success:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(72,199,116,.25);color:#fff}.file.is-success.is-active .file-cta,.file.is-success:active .file-cta{background-color:#3abb67;border-color:transparent;color:#fff}.file.is-warning .file-cta{background-color:#ffdd57;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-warning.is-hovered .file-cta,.file.is-warning:hover .file-cta{background-color:#ffdb4a;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-warning.is-focused .file-cta,.file.is-warning:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(255,221,87,.25);color:rgba(0,0,0,.7)}.file.is-warning.is-active .file-cta,.file.is-warning:active .file-cta{background-color:#ffd83d;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-danger .file-cta{background-color:#f14668;border-color:transparent;color:#fff}.file.is-danger.is-hovered .file-cta,.file.is-danger:hover .file-cta{background-color:#f03a5f;border-color:transparent;color:#fff}.file.is-danger.is-focused .file-cta,.file.is-danger:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(241,70,104,.25);color:#fff}.file.is-danger.is-active .file-cta,.file.is-danger:active .file-cta{background-color:#ef2e55;border-color:transparent;color:#fff}.file.is-small{font-size:.75rem}.file.is-medium{font-size:1.25rem}.file.is-medium .file-icon .fa{font-size:21px}.file.is-large{font-size:1.5rem}.file.is-large .file-icon .fa{font-size:28px}.file.has-name .file-cta{border-bottom-right-radius:0;border-top-right-radius:0}.file.has-name .file-name{border-bottom-left-radius:0;border-top-left-radius:0}.file.has-name.is-empty .file-cta{border-radius:4px}.file.has-name.is-empty .file-name{display:none}.file.is-boxed .file-label{flex-direction:column}.file.is-boxed .file-cta{flex-direction:column;height:auto;padding:1em 3em}.file.is-boxed .file-name{border-width:0 
1px 1px}.file.is-boxed .file-icon{height:1.5em;width:1.5em}.file.is-boxed .file-icon .fa{font-size:21px}.file.is-boxed.is-small .file-icon .fa{font-size:14px}.file.is-boxed.is-medium .file-icon .fa{font-size:28px}.file.is-boxed.is-large .file-icon .fa{font-size:35px}.file.is-boxed.has-name .file-cta{border-radius:4px 4px 0 0}.file.is-boxed.has-name .file-name{border-radius:0 0 4px 4px;border-width:0 1px 1px}.file.is-centered{justify-content:center}.file.is-fullwidth .file-label{width:100%}.file.is-fullwidth .file-name{flex-grow:1;max-width:none}.file.is-right{justify-content:flex-end}.file.is-right .file-cta{border-radius:0 4px 4px 0}.file.is-right .file-name{border-radius:4px 0 0 4px;border-width:1px 0 1px 1px;order:-1}.file-label{align-items:stretch;display:flex;cursor:pointer;justify-content:flex-start;overflow:hidden;position:relative}.file-label:hover .file-cta{background-color:#eee;color:#363636}.file-label:hover .file-name{border-color:#d5d5d5}.file-label:active .file-cta{background-color:#e8e8e8;color:#363636}.file-label:active .file-name{border-color:#cfcfcf}.file-input{height:100%;left:0;opacity:0;outline:0;position:absolute;top:0;width:100%}.file-cta,.file-name{border-color:#dbdbdb;border-radius:4px;font-size:1em;padding-left:1em;padding-right:1em;white-space:nowrap}.file-cta{background-color:#f5f5f5;color:#4a4a4a}.file-name{border-color:#dbdbdb;border-style:solid;border-width:1px 1px 1px 0;display:block;max-width:16em;overflow:hidden;text-align:inherit;text-overflow:ellipsis}.file-icon{align-items:center;display:flex;height:1em;justify-content:center;margin-right:.5em;width:1em}.file-icon 
.fa{font-size:14px}.label{color:#363636;display:block;font-size:1rem;font-weight:700}.label:not(:last-child){margin-bottom:.5em}.label.is-small{font-size:.75rem}.label.is-medium{font-size:1.25rem}.label.is-large{font-size:1.5rem}.help{display:block;font-size:.75rem;margin-top:.25rem}.help.is-white{color:#fff}.help.is-black{color:#0a0a0a}.help.is-light{color:#f5f5f5}.help.is-dark{color:#363636}.help.is-primary{color:#00d1b2}.help.is-link{color:#3273dc}.help.is-info{color:#3298dc}.help.is-success{color:#48c774}.help.is-warning{color:#ffdd57}.help.is-danger{color:#f14668}.field:not(:last-child){margin-bottom:.75rem}.field.has-addons{display:flex;justify-content:flex-start}.field.has-addons .control:not(:last-child){margin-right:-1px}.field.has-addons .control:not(:first-child):not(:last-child) .button,.field.has-addons .control:not(:first-child):not(:last-child) .input,.field.has-addons .control:not(:first-child):not(:last-child) .select select{border-radius:0}.field.has-addons .control:first-child:not(:only-child) .button,.field.has-addons .control:first-child:not(:only-child) .input,.field.has-addons .control:first-child:not(:only-child) .select select{border-bottom-right-radius:0;border-top-right-radius:0}.field.has-addons .control:last-child:not(:only-child) .button,.field.has-addons .control:last-child:not(:only-child) .input,.field.has-addons .control:last-child:not(:only-child) .select select{border-bottom-left-radius:0;border-top-left-radius:0}.field.has-addons .control .button:not([disabled]).is-hovered,.field.has-addons .control .button:not([disabled]):hover,.field.has-addons .control .input:not([disabled]).is-hovered,.field.has-addons .control .input:not([disabled]):hover,.field.has-addons .control .select select:not([disabled]).is-hovered,.field.has-addons .control .select select:not([disabled]):hover{z-index:2}.field.has-addons .control .button:not([disabled]).is-active,.field.has-addons .control .button:not([disabled]).is-focused,.field.has-addons 
.control .button:not([disabled]):active,.field.has-addons .control .button:not([disabled]):focus,.field.has-addons .control .input:not([disabled]).is-active,.field.has-addons .control .input:not([disabled]).is-focused,.field.has-addons .control .input:not([disabled]):active,.field.has-addons .control .input:not([disabled]):focus,.field.has-addons .control .select select:not([disabled]).is-active,.field.has-addons .control .select select:not([disabled]).is-focused,.field.has-addons .control .select select:not([disabled]):active,.field.has-addons .control .select select:not([disabled]):focus{z-index:3}.field.has-addons .control .button:not([disabled]).is-active:hover,.field.has-addons .control .button:not([disabled]).is-focused:hover,.field.has-addons .control .button:not([disabled]):active:hover,.field.has-addons .control .button:not([disabled]):focus:hover,.field.has-addons .control .input:not([disabled]).is-active:hover,.field.has-addons .control .input:not([disabled]).is-focused:hover,.field.has-addons .control .input:not([disabled]):active:hover,.field.has-addons .control .input:not([disabled]):focus:hover,.field.has-addons .control .select select:not([disabled]).is-active:hover,.field.has-addons .control .select select:not([disabled]).is-focused:hover,.field.has-addons .control .select select:not([disabled]):active:hover,.field.has-addons .control .select select:not([disabled]):focus:hover{z-index:4}.field.has-addons .control.is-expanded{flex-grow:1;flex-shrink:1}.field.has-addons.has-addons-centered{justify-content:center}.field.has-addons.has-addons-right{justify-content:flex-end}.field.has-addons.has-addons-fullwidth 
.control{flex-grow:1;flex-shrink:0}.field.is-grouped{display:flex;justify-content:flex-start}.field.is-grouped>.control{flex-shrink:0}.field.is-grouped>.control:not(:last-child){margin-bottom:0;margin-right:.75rem}.field.is-grouped>.control.is-expanded{flex-grow:1;flex-shrink:1}.field.is-grouped.is-grouped-centered{justify-content:center}.field.is-grouped.is-grouped-right{justify-content:flex-end}.field.is-grouped.is-grouped-multiline{flex-wrap:wrap}.field.is-grouped.is-grouped-multiline>.control:last-child,.field.is-grouped.is-grouped-multiline>.control:not(:last-child){margin-bottom:.75rem}.field.is-grouped.is-grouped-multiline:last-child{margin-bottom:-.75rem}.field.is-grouped.is-grouped-multiline:not(:last-child){margin-bottom:0}@media screen and (min-width:769px),print{.field.is-horizontal{display:flex}}.field-label .label{font-size:inherit}@media screen and (max-width:768px){.field-label{margin-bottom:.5rem}}@media screen and (min-width:769px),print{.field-label{flex-basis:0;flex-grow:1;flex-shrink:0;margin-right:1.5rem;text-align:right}.field-label.is-small{font-size:.75rem;padding-top:.375em}.field-label.is-normal{padding-top:.375em}.field-label.is-medium{font-size:1.25rem;padding-top:.375em}.field-label.is-large{font-size:1.5rem;padding-top:.375em}}.field-body .field .field{margin-bottom:0}@media screen and (min-width:769px),print{.field-body{display:flex;flex-basis:0;flex-grow:5;flex-shrink:1}.field-body .field{margin-bottom:0}.field-body>.field{flex-shrink:1}.field-body>.field:not(.is-narrow){flex-grow:1}.field-body>.field:not(:last-child){margin-right:.75rem}}.control{box-sizing:border-box;clear:both;font-size:1rem;position:relative;text-align:inherit}.control.has-icons-left .input:focus~.icon,.control.has-icons-left .select:focus~.icon,.control.has-icons-right .input:focus~.icon,.control.has-icons-right .select:focus~.icon{color:#4a4a4a}.control.has-icons-left .input.is-small~.icon,.control.has-icons-left .select.is-small~.icon,.control.has-icons-right 
.input.is-small~.icon,.control.has-icons-right .select.is-small~.icon{font-size:.75rem}.control.has-icons-left .input.is-medium~.icon,.control.has-icons-left .select.is-medium~.icon,.control.has-icons-right .input.is-medium~.icon,.control.has-icons-right .select.is-medium~.icon{font-size:1.25rem}.control.has-icons-left .input.is-large~.icon,.control.has-icons-left .select.is-large~.icon,.control.has-icons-right .input.is-large~.icon,.control.has-icons-right .select.is-large~.icon{font-size:1.5rem}.control.has-icons-left .icon,.control.has-icons-right .icon{color:#dbdbdb;height:2.5em;pointer-events:none;position:absolute;top:0;width:2.5em;z-index:4}.control.has-icons-left .input,.control.has-icons-left .select select{padding-left:2.5em}.control.has-icons-left .icon.is-left{left:0}.control.has-icons-right .input,.control.has-icons-right .select select{padding-right:2.5em}.control.has-icons-right .icon.is-right{right:0}.control.is-loading::after{position:absolute!important;right:.625em;top:.625em;z-index:4}.control.is-loading.is-small:after{font-size:.75rem}.control.is-loading.is-medium:after{font-size:1.25rem}.control.is-loading.is-large:after{font-size:1.5rem}.breadcrumb{font-size:1rem;white-space:nowrap}.breadcrumb a{align-items:center;color:#3273dc;display:flex;justify-content:center;padding:0 .75em}.breadcrumb a:hover{color:#363636}.breadcrumb li{align-items:center;display:flex}.breadcrumb li:first-child a{padding-left:0}.breadcrumb li.is-active a{color:#363636;cursor:default;pointer-events:none}.breadcrumb li+li::before{color:#b5b5b5;content:"\0002f"}.breadcrumb ol,.breadcrumb ul{align-items:flex-start;display:flex;flex-wrap:wrap;justify-content:flex-start}.breadcrumb .icon:first-child{margin-right:.5em}.breadcrumb .icon:last-child{margin-left:.5em}.breadcrumb.is-centered ol,.breadcrumb.is-centered ul{justify-content:center}.breadcrumb.is-right ol,.breadcrumb.is-right 
ul{justify-content:flex-end}.breadcrumb.is-small{font-size:.75rem}.breadcrumb.is-medium{font-size:1.25rem}.breadcrumb.is-large{font-size:1.5rem}.breadcrumb.has-arrow-separator li+li::before{content:"\02192"}.breadcrumb.has-bullet-separator li+li::before{content:"\02022"}.breadcrumb.has-dot-separator li+li::before{content:"\000b7"}.breadcrumb.has-succeeds-separator li+li::before{content:"\0227B"}.card{background-color:#fff;border-radius:.25rem;box-shadow:0 .5em 1em -.125em rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.02);color:#4a4a4a;max-width:100%;overflow:hidden;position:relative}.card-header{background-color:transparent;align-items:stretch;box-shadow:0 .125em .25em rgba(10,10,10,.1);display:flex}.card-header-title{align-items:center;color:#363636;display:flex;flex-grow:1;font-weight:700;padding:.75rem 1rem}.card-header-title.is-centered{justify-content:center}.card-header-icon{align-items:center;cursor:pointer;display:flex;justify-content:center;padding:.75rem 1rem}.card-image{display:block;position:relative}.card-content{background-color:transparent;padding:1.5rem}.card-footer{background-color:transparent;border-top:1px solid #ededed;align-items:stretch;display:flex}.card-footer-item{align-items:center;display:flex;flex-basis:0;flex-grow:1;flex-shrink:0;justify-content:center;padding:.75rem}.card-footer-item:not(:last-child){border-right:1px solid #ededed}.card .media:not(:last-child){margin-bottom:1.5rem}.dropdown{display:inline-flex;position:relative;vertical-align:top}.dropdown.is-active .dropdown-menu,.dropdown.is-hoverable:hover .dropdown-menu{display:block}.dropdown.is-right .dropdown-menu{left:auto;right:0}.dropdown.is-up .dropdown-menu{bottom:100%;padding-bottom:4px;padding-top:initial;top:auto}.dropdown-menu{display:none;left:0;min-width:12rem;padding-top:4px;position:absolute;top:100%;z-index:20}.dropdown-content{background-color:#fff;border-radius:4px;box-shadow:0 .5em 1em -.125em rgba(10,10,10,.1),0 0 0 1px 
rgba(10,10,10,.02);padding-bottom:.5rem;padding-top:.5rem}.dropdown-item{color:#4a4a4a;display:block;font-size:.875rem;line-height:1.5;padding:.375rem 1rem;position:relative}a.dropdown-item,button.dropdown-item{padding-right:3rem;text-align:inherit;white-space:nowrap;width:100%}a.dropdown-item:hover,button.dropdown-item:hover{background-color:#f5f5f5;color:#0a0a0a}a.dropdown-item.is-active,button.dropdown-item.is-active{background-color:#3273dc;color:#fff}.dropdown-divider{background-color:#ededed;border:none;display:block;height:1px;margin:.5rem 0}.level{align-items:center;justify-content:space-between}.level code{border-radius:4px}.level img{display:inline-block;vertical-align:top}.level.is-mobile{display:flex}.level.is-mobile .level-left,.level.is-mobile .level-right{display:flex}.level.is-mobile .level-left+.level-right{margin-top:0}.level.is-mobile .level-item:not(:last-child){margin-bottom:0;margin-right:.75rem}.level.is-mobile .level-item:not(.is-narrow){flex-grow:1}@media screen and (min-width:769px),print{.level{display:flex}.level>.level-item:not(.is-narrow){flex-grow:1}}.level-item{align-items:center;display:flex;flex-basis:auto;flex-grow:0;flex-shrink:0;justify-content:center}.level-item .subtitle,.level-item .title{margin-bottom:0}@media screen and (max-width:768px){.level-item:not(:last-child){margin-bottom:.75rem}}.level-left,.level-right{flex-basis:auto;flex-grow:0;flex-shrink:0}.level-left .level-item.is-flexible,.level-right .level-item.is-flexible{flex-grow:1}@media screen and (min-width:769px),print{.level-left .level-item:not(:last-child),.level-right .level-item:not(:last-child){margin-right:.75rem}}.level-left{align-items:center;justify-content:flex-start}@media screen and (max-width:768px){.level-left+.level-right{margin-top:1.5rem}}@media screen and (min-width:769px),print{.level-left{display:flex}}.level-right{align-items:center;justify-content:flex-end}@media screen and 
(min-width:769px),print{.level-right{display:flex}}.media{align-items:flex-start;display:flex;text-align:inherit}.media .content:not(:last-child){margin-bottom:.75rem}.media .media{border-top:1px solid rgba(219,219,219,.5);display:flex;padding-top:.75rem}.media .media .content:not(:last-child),.media .media .control:not(:last-child){margin-bottom:.5rem}.media .media .media{padding-top:.5rem}.media .media .media+.media{margin-top:.5rem}.media+.media{border-top:1px solid rgba(219,219,219,.5);margin-top:1rem;padding-top:1rem}.media.is-large+.media{margin-top:1.5rem;padding-top:1.5rem}.media-left,.media-right{flex-basis:auto;flex-grow:0;flex-shrink:0}.media-left{margin-right:1rem}.media-right{margin-left:1rem}.media-content{flex-basis:auto;flex-grow:1;flex-shrink:1;text-align:inherit}@media screen and (max-width:768px){.media-content{overflow-x:auto}}.menu{font-size:1rem}.menu.is-small{font-size:.75rem}.menu.is-medium{font-size:1.25rem}.menu.is-large{font-size:1.5rem}.menu-list{line-height:1.25}.menu-list a{border-radius:2px;color:#4a4a4a;display:block;padding:.5em .75em}.menu-list a:hover{background-color:#f5f5f5;color:#363636}.menu-list a.is-active{background-color:#3273dc;color:#fff}.menu-list li ul{border-left:1px solid #dbdbdb;margin:.75em;padding-left:.75em}.menu-label{color:#7a7a7a;font-size:.75em;letter-spacing:.1em;text-transform:uppercase}.menu-label:not(:first-child){margin-top:1em}.menu-label:not(:last-child){margin-bottom:1em}.message{background-color:#f5f5f5;border-radius:4px;font-size:1rem}.message strong{color:currentColor}.message a:not(.button):not(.tag):not(.dropdown-item){color:currentColor;text-decoration:underline}.message.is-small{font-size:.75rem}.message.is-medium{font-size:1.25rem}.message.is-large{font-size:1.5rem}.message.is-white{background-color:#fff}.message.is-white .message-header{background-color:#fff;color:#0a0a0a}.message.is-white .message-body{border-color:#fff}.message.is-black{background-color:#fafafa}.message.is-black 
.message-header{background-color:#0a0a0a;color:#fff}.message.is-black .message-body{border-color:#0a0a0a}.message.is-light{background-color:#fafafa}.message.is-light .message-header{background-color:#f5f5f5;color:rgba(0,0,0,.7)}.message.is-light .message-body{border-color:#f5f5f5}.message.is-dark{background-color:#fafafa}.message.is-dark .message-header{background-color:#363636;color:#fff}.message.is-dark .message-body{border-color:#363636}.message.is-primary{background-color:#ebfffc}.message.is-primary .message-header{background-color:#00d1b2;color:#fff}.message.is-primary .message-body{border-color:#00d1b2;color:#00947e}.message.is-link{background-color:#eef3fc}.message.is-link .message-header{background-color:#3273dc;color:#fff}.message.is-link .message-body{border-color:#3273dc;color:#2160c4}.message.is-info{background-color:#eef6fc}.message.is-info .message-header{background-color:#3298dc;color:#fff}.message.is-info .message-body{border-color:#3298dc;color:#1d72aa}.message.is-success{background-color:#effaf3}.message.is-success .message-header{background-color:#48c774;color:#fff}.message.is-success .message-body{border-color:#48c774;color:#257942}.message.is-warning{background-color:#fffbeb}.message.is-warning .message-header{background-color:#ffdd57;color:rgba(0,0,0,.7)}.message.is-warning .message-body{border-color:#ffdd57;color:#947600}.message.is-danger{background-color:#feecf0}.message.is-danger .message-header{background-color:#f14668;color:#fff}.message.is-danger .message-body{border-color:#f14668;color:#cc0f35}.message-header{align-items:center;background-color:#4a4a4a;border-radius:4px 4px 0 0;color:#fff;display:flex;font-weight:700;justify-content:space-between;line-height:1.25;padding:.75em 1em;position:relative}.message-header 
.delete{flex-grow:0;flex-shrink:0;margin-left:.75em}.message-header+.message-body{border-width:0;border-top-left-radius:0;border-top-right-radius:0}.message-body{border-color:#dbdbdb;border-radius:4px;border-style:solid;border-width:0 0 0 4px;color:#4a4a4a;padding:1.25em 1.5em}.message-body code,.message-body pre{background-color:#fff}.message-body pre code{background-color:transparent}.modal{align-items:center;display:none;flex-direction:column;justify-content:center;overflow:hidden;position:fixed;z-index:40}.modal.is-active{display:flex}.modal-background{background-color:rgba(10,10,10,.86)}.modal-card,.modal-content{margin:0 20px;max-height:calc(100vh - 160px);overflow:auto;position:relative;width:100%}@media screen and (min-width:769px){.modal-card,.modal-content{margin:0 auto;max-height:calc(100vh - 40px);width:640px}}.modal-close{background:0 0;height:40px;position:fixed;right:20px;top:20px;width:40px}.modal-card{display:flex;flex-direction:column;max-height:calc(100vh - 40px);overflow:hidden;-ms-overflow-y:visible}.modal-card-foot,.modal-card-head{align-items:center;background-color:#f5f5f5;display:flex;flex-shrink:0;justify-content:flex-start;padding:20px;position:relative}.modal-card-head{border-bottom:1px solid #dbdbdb;border-top-left-radius:6px;border-top-right-radius:6px}.modal-card-title{color:#363636;flex-grow:1;flex-shrink:0;font-size:1.5rem;line-height:1}.modal-card-foot{border-bottom-left-radius:6px;border-bottom-right-radius:6px;border-top:1px solid #dbdbdb}.modal-card-foot .button:not(:last-child){margin-right:.5em}.modal-card-body{-webkit-overflow-scrolling:touch;background-color:#fff;flex-grow:1;flex-shrink:1;overflow:auto;padding:20px}.navbar{background-color:#fff;min-height:3.25rem;position:relative;z-index:30}.navbar.is-white{background-color:#fff;color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link,.navbar.is-white .navbar-brand>.navbar-item{color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link.is-active,.navbar.is-white 
.navbar-brand .navbar-link:focus,.navbar.is-white .navbar-brand .navbar-link:hover,.navbar.is-white .navbar-brand>a.navbar-item.is-active,.navbar.is-white .navbar-brand>a.navbar-item:focus,.navbar.is-white .navbar-brand>a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link::after{border-color:#0a0a0a}.navbar.is-white .navbar-burger{color:#0a0a0a}@media screen and (min-width:1024px){.navbar.is-white .navbar-end .navbar-link,.navbar.is-white .navbar-end>.navbar-item,.navbar.is-white .navbar-start .navbar-link,.navbar.is-white .navbar-start>.navbar-item{color:#0a0a0a}.navbar.is-white .navbar-end .navbar-link.is-active,.navbar.is-white .navbar-end .navbar-link:focus,.navbar.is-white .navbar-end .navbar-link:hover,.navbar.is-white .navbar-end>a.navbar-item.is-active,.navbar.is-white .navbar-end>a.navbar-item:focus,.navbar.is-white .navbar-end>a.navbar-item:hover,.navbar.is-white .navbar-start .navbar-link.is-active,.navbar.is-white .navbar-start .navbar-link:focus,.navbar.is-white .navbar-start .navbar-link:hover,.navbar.is-white .navbar-start>a.navbar-item.is-active,.navbar.is-white .navbar-start>a.navbar-item:focus,.navbar.is-white .navbar-start>a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-end .navbar-link::after,.navbar.is-white .navbar-start .navbar-link::after{border-color:#0a0a0a}.navbar.is-white .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-white .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-white .navbar-item.has-dropdown:hover .navbar-link{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-dropdown a.navbar-item.is-active{background-color:#fff;color:#0a0a0a}}.navbar.is-black{background-color:#0a0a0a;color:#fff}.navbar.is-black .navbar-brand .navbar-link,.navbar.is-black .navbar-brand>.navbar-item{color:#fff}.navbar.is-black .navbar-brand .navbar-link.is-active,.navbar.is-black .navbar-brand .navbar-link:focus,.navbar.is-black 
.navbar-brand .navbar-link:hover,.navbar.is-black .navbar-brand>a.navbar-item.is-active,.navbar.is-black .navbar-brand>a.navbar-item:focus,.navbar.is-black .navbar-brand>a.navbar-item:hover{background-color:#000;color:#fff}.navbar.is-black .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-black .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-black .navbar-end .navbar-link,.navbar.is-black .navbar-end>.navbar-item,.navbar.is-black .navbar-start .navbar-link,.navbar.is-black .navbar-start>.navbar-item{color:#fff}.navbar.is-black .navbar-end .navbar-link.is-active,.navbar.is-black .navbar-end .navbar-link:focus,.navbar.is-black .navbar-end .navbar-link:hover,.navbar.is-black .navbar-end>a.navbar-item.is-active,.navbar.is-black .navbar-end>a.navbar-item:focus,.navbar.is-black .navbar-end>a.navbar-item:hover,.navbar.is-black .navbar-start .navbar-link.is-active,.navbar.is-black .navbar-start .navbar-link:focus,.navbar.is-black .navbar-start .navbar-link:hover,.navbar.is-black .navbar-start>a.navbar-item.is-active,.navbar.is-black .navbar-start>a.navbar-item:focus,.navbar.is-black .navbar-start>a.navbar-item:hover{background-color:#000;color:#fff}.navbar.is-black .navbar-end .navbar-link::after,.navbar.is-black .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-black .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-black .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-black .navbar-item.has-dropdown:hover .navbar-link{background-color:#000;color:#fff}.navbar.is-black .navbar-dropdown a.navbar-item.is-active{background-color:#0a0a0a;color:#fff}}.navbar.is-light{background-color:#f5f5f5;color:rgba(0,0,0,.7)}.navbar.is-light .navbar-brand .navbar-link,.navbar.is-light .navbar-brand>.navbar-item{color:rgba(0,0,0,.7)}.navbar.is-light .navbar-brand .navbar-link.is-active,.navbar.is-light .navbar-brand .navbar-link:focus,.navbar.is-light .navbar-brand .navbar-link:hover,.navbar.is-light 
.navbar-brand>a.navbar-item.is-active,.navbar.is-light .navbar-brand>a.navbar-item:focus,.navbar.is-light .navbar-brand>a.navbar-item:hover{background-color:#e8e8e8;color:rgba(0,0,0,.7)}.navbar.is-light .navbar-brand .navbar-link::after{border-color:rgba(0,0,0,.7)}.navbar.is-light .navbar-burger{color:rgba(0,0,0,.7)}@media screen and (min-width:1024px){.navbar.is-light .navbar-end .navbar-link,.navbar.is-light .navbar-end>.navbar-item,.navbar.is-light .navbar-start .navbar-link,.navbar.is-light .navbar-start>.navbar-item{color:rgba(0,0,0,.7)}.navbar.is-light .navbar-end .navbar-link.is-active,.navbar.is-light .navbar-end .navbar-link:focus,.navbar.is-light .navbar-end .navbar-link:hover,.navbar.is-light .navbar-end>a.navbar-item.is-active,.navbar.is-light .navbar-end>a.navbar-item:focus,.navbar.is-light .navbar-end>a.navbar-item:hover,.navbar.is-light .navbar-start .navbar-link.is-active,.navbar.is-light .navbar-start .navbar-link:focus,.navbar.is-light .navbar-start .navbar-link:hover,.navbar.is-light .navbar-start>a.navbar-item.is-active,.navbar.is-light .navbar-start>a.navbar-item:focus,.navbar.is-light .navbar-start>a.navbar-item:hover{background-color:#e8e8e8;color:rgba(0,0,0,.7)}.navbar.is-light .navbar-end .navbar-link::after,.navbar.is-light .navbar-start .navbar-link::after{border-color:rgba(0,0,0,.7)}.navbar.is-light .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-light .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-light .navbar-item.has-dropdown:hover .navbar-link{background-color:#e8e8e8;color:rgba(0,0,0,.7)}.navbar.is-light .navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:rgba(0,0,0,.7)}}.navbar.is-dark{background-color:#363636;color:#fff}.navbar.is-dark .navbar-brand .navbar-link,.navbar.is-dark .navbar-brand>.navbar-item{color:#fff}.navbar.is-dark .navbar-brand .navbar-link.is-active,.navbar.is-dark .navbar-brand .navbar-link:focus,.navbar.is-dark .navbar-brand .navbar-link:hover,.navbar.is-dark 
.navbar-brand>a.navbar-item.is-active,.navbar.is-dark .navbar-brand>a.navbar-item:focus,.navbar.is-dark .navbar-brand>a.navbar-item:hover{background-color:#292929;color:#fff}.navbar.is-dark .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-dark .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-dark .navbar-end .navbar-link,.navbar.is-dark .navbar-end>.navbar-item,.navbar.is-dark .navbar-start .navbar-link,.navbar.is-dark .navbar-start>.navbar-item{color:#fff}.navbar.is-dark .navbar-end .navbar-link.is-active,.navbar.is-dark .navbar-end .navbar-link:focus,.navbar.is-dark .navbar-end .navbar-link:hover,.navbar.is-dark .navbar-end>a.navbar-item.is-active,.navbar.is-dark .navbar-end>a.navbar-item:focus,.navbar.is-dark .navbar-end>a.navbar-item:hover,.navbar.is-dark .navbar-start .navbar-link.is-active,.navbar.is-dark .navbar-start .navbar-link:focus,.navbar.is-dark .navbar-start .navbar-link:hover,.navbar.is-dark .navbar-start>a.navbar-item.is-active,.navbar.is-dark .navbar-start>a.navbar-item:focus,.navbar.is-dark .navbar-start>a.navbar-item:hover{background-color:#292929;color:#fff}.navbar.is-dark .navbar-end .navbar-link::after,.navbar.is-dark .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-dark .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-dark .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-dark .navbar-item.has-dropdown:hover .navbar-link{background-color:#292929;color:#fff}.navbar.is-dark .navbar-dropdown a.navbar-item.is-active{background-color:#363636;color:#fff}}.navbar.is-primary{background-color:#00d1b2;color:#fff}.navbar.is-primary .navbar-brand .navbar-link,.navbar.is-primary .navbar-brand>.navbar-item{color:#fff}.navbar.is-primary .navbar-brand .navbar-link.is-active,.navbar.is-primary .navbar-brand .navbar-link:focus,.navbar.is-primary .navbar-brand .navbar-link:hover,.navbar.is-primary .navbar-brand>a.navbar-item.is-active,.navbar.is-primary 
.navbar-brand>a.navbar-item:focus,.navbar.is-primary .navbar-brand>a.navbar-item:hover{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-primary .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-primary .navbar-end .navbar-link,.navbar.is-primary .navbar-end>.navbar-item,.navbar.is-primary .navbar-start .navbar-link,.navbar.is-primary .navbar-start>.navbar-item{color:#fff}.navbar.is-primary .navbar-end .navbar-link.is-active,.navbar.is-primary .navbar-end .navbar-link:focus,.navbar.is-primary .navbar-end .navbar-link:hover,.navbar.is-primary .navbar-end>a.navbar-item.is-active,.navbar.is-primary .navbar-end>a.navbar-item:focus,.navbar.is-primary .navbar-end>a.navbar-item:hover,.navbar.is-primary .navbar-start .navbar-link.is-active,.navbar.is-primary .navbar-start .navbar-link:focus,.navbar.is-primary .navbar-start .navbar-link:hover,.navbar.is-primary .navbar-start>a.navbar-item.is-active,.navbar.is-primary .navbar-start>a.navbar-item:focus,.navbar.is-primary .navbar-start>a.navbar-item:hover{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-end .navbar-link::after,.navbar.is-primary .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-primary .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-primary .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-primary .navbar-item.has-dropdown:hover .navbar-link{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-dropdown a.navbar-item.is-active{background-color:#00d1b2;color:#fff}}.navbar.is-link{background-color:#3273dc;color:#fff}.navbar.is-link .navbar-brand .navbar-link,.navbar.is-link .navbar-brand>.navbar-item{color:#fff}.navbar.is-link .navbar-brand .navbar-link.is-active,.navbar.is-link .navbar-brand .navbar-link:focus,.navbar.is-link .navbar-brand .navbar-link:hover,.navbar.is-link .navbar-brand>a.navbar-item.is-active,.navbar.is-link 
.navbar-brand>a.navbar-item:focus,.navbar.is-link .navbar-brand>a.navbar-item:hover{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-link .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-link .navbar-end .navbar-link,.navbar.is-link .navbar-end>.navbar-item,.navbar.is-link .navbar-start .navbar-link,.navbar.is-link .navbar-start>.navbar-item{color:#fff}.navbar.is-link .navbar-end .navbar-link.is-active,.navbar.is-link .navbar-end .navbar-link:focus,.navbar.is-link .navbar-end .navbar-link:hover,.navbar.is-link .navbar-end>a.navbar-item.is-active,.navbar.is-link .navbar-end>a.navbar-item:focus,.navbar.is-link .navbar-end>a.navbar-item:hover,.navbar.is-link .navbar-start .navbar-link.is-active,.navbar.is-link .navbar-start .navbar-link:focus,.navbar.is-link .navbar-start .navbar-link:hover,.navbar.is-link .navbar-start>a.navbar-item.is-active,.navbar.is-link .navbar-start>a.navbar-item:focus,.navbar.is-link .navbar-start>a.navbar-item:hover{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-end .navbar-link::after,.navbar.is-link .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-link .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-link .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-link .navbar-item.has-dropdown:hover .navbar-link{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-dropdown a.navbar-item.is-active{background-color:#3273dc;color:#fff}}.navbar.is-info{background-color:#3298dc;color:#fff}.navbar.is-info .navbar-brand .navbar-link,.navbar.is-info .navbar-brand>.navbar-item{color:#fff}.navbar.is-info .navbar-brand .navbar-link.is-active,.navbar.is-info .navbar-brand .navbar-link:focus,.navbar.is-info .navbar-brand .navbar-link:hover,.navbar.is-info .navbar-brand>a.navbar-item.is-active,.navbar.is-info .navbar-brand>a.navbar-item:focus,.navbar.is-info 
.navbar-brand>a.navbar-item:hover{background-color:#238cd1;color:#fff}.navbar.is-info .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-info .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-info .navbar-end .navbar-link,.navbar.is-info .navbar-end>.navbar-item,.navbar.is-info .navbar-start .navbar-link,.navbar.is-info .navbar-start>.navbar-item{color:#fff}.navbar.is-info .navbar-end .navbar-link.is-active,.navbar.is-info .navbar-end .navbar-link:focus,.navbar.is-info .navbar-end .navbar-link:hover,.navbar.is-info .navbar-end>a.navbar-item.is-active,.navbar.is-info .navbar-end>a.navbar-item:focus,.navbar.is-info .navbar-end>a.navbar-item:hover,.navbar.is-info .navbar-start .navbar-link.is-active,.navbar.is-info .navbar-start .navbar-link:focus,.navbar.is-info .navbar-start .navbar-link:hover,.navbar.is-info .navbar-start>a.navbar-item.is-active,.navbar.is-info .navbar-start>a.navbar-item:focus,.navbar.is-info .navbar-start>a.navbar-item:hover{background-color:#238cd1;color:#fff}.navbar.is-info .navbar-end .navbar-link::after,.navbar.is-info .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-info .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-info .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-info .navbar-item.has-dropdown:hover .navbar-link{background-color:#238cd1;color:#fff}.navbar.is-info .navbar-dropdown a.navbar-item.is-active{background-color:#3298dc;color:#fff}}.navbar.is-success{background-color:#48c774;color:#fff}.navbar.is-success .navbar-brand .navbar-link,.navbar.is-success .navbar-brand>.navbar-item{color:#fff}.navbar.is-success .navbar-brand .navbar-link.is-active,.navbar.is-success .navbar-brand .navbar-link:focus,.navbar.is-success .navbar-brand .navbar-link:hover,.navbar.is-success .navbar-brand>a.navbar-item.is-active,.navbar.is-success .navbar-brand>a.navbar-item:focus,.navbar.is-success .navbar-brand>a.navbar-item:hover{background-color:#3abb67;color:#fff}.navbar.is-success 
.navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-success .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-success .navbar-end .navbar-link,.navbar.is-success .navbar-end>.navbar-item,.navbar.is-success .navbar-start .navbar-link,.navbar.is-success .navbar-start>.navbar-item{color:#fff}.navbar.is-success .navbar-end .navbar-link.is-active,.navbar.is-success .navbar-end .navbar-link:focus,.navbar.is-success .navbar-end .navbar-link:hover,.navbar.is-success .navbar-end>a.navbar-item.is-active,.navbar.is-success .navbar-end>a.navbar-item:focus,.navbar.is-success .navbar-end>a.navbar-item:hover,.navbar.is-success .navbar-start .navbar-link.is-active,.navbar.is-success .navbar-start .navbar-link:focus,.navbar.is-success .navbar-start .navbar-link:hover,.navbar.is-success .navbar-start>a.navbar-item.is-active,.navbar.is-success .navbar-start>a.navbar-item:focus,.navbar.is-success .navbar-start>a.navbar-item:hover{background-color:#3abb67;color:#fff}.navbar.is-success .navbar-end .navbar-link::after,.navbar.is-success .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-success .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-success .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-success .navbar-item.has-dropdown:hover .navbar-link{background-color:#3abb67;color:#fff}.navbar.is-success .navbar-dropdown a.navbar-item.is-active{background-color:#48c774;color:#fff}}.navbar.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link,.navbar.is-warning .navbar-brand>.navbar-item{color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link.is-active,.navbar.is-warning .navbar-brand .navbar-link:focus,.navbar.is-warning .navbar-brand .navbar-link:hover,.navbar.is-warning .navbar-brand>a.navbar-item.is-active,.navbar.is-warning .navbar-brand>a.navbar-item:focus,.navbar.is-warning 
.navbar-brand>a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link::after{border-color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-burger{color:rgba(0,0,0,.7)}@media screen and (min-width:1024px){.navbar.is-warning .navbar-end .navbar-link,.navbar.is-warning .navbar-end>.navbar-item,.navbar.is-warning .navbar-start .navbar-link,.navbar.is-warning .navbar-start>.navbar-item{color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-end .navbar-link.is-active,.navbar.is-warning .navbar-end .navbar-link:focus,.navbar.is-warning .navbar-end .navbar-link:hover,.navbar.is-warning .navbar-end>a.navbar-item.is-active,.navbar.is-warning .navbar-end>a.navbar-item:focus,.navbar.is-warning .navbar-end>a.navbar-item:hover,.navbar.is-warning .navbar-start .navbar-link.is-active,.navbar.is-warning .navbar-start .navbar-link:focus,.navbar.is-warning .navbar-start .navbar-link:hover,.navbar.is-warning .navbar-start>a.navbar-item.is-active,.navbar.is-warning .navbar-start>a.navbar-item:focus,.navbar.is-warning .navbar-start>a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-end .navbar-link::after,.navbar.is-warning .navbar-start .navbar-link::after{border-color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-warning .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-warning .navbar-item.has-dropdown:hover .navbar-link{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-dropdown a.navbar-item.is-active{background-color:#ffdd57;color:rgba(0,0,0,.7)}}.navbar.is-danger{background-color:#f14668;color:#fff}.navbar.is-danger .navbar-brand .navbar-link,.navbar.is-danger .navbar-brand>.navbar-item{color:#fff}.navbar.is-danger .navbar-brand .navbar-link.is-active,.navbar.is-danger .navbar-brand .navbar-link:focus,.navbar.is-danger .navbar-brand .navbar-link:hover,.navbar.is-danger 
.navbar-brand>a.navbar-item.is-active,.navbar.is-danger .navbar-brand>a.navbar-item:focus,.navbar.is-danger .navbar-brand>a.navbar-item:hover{background-color:#ef2e55;color:#fff}.navbar.is-danger .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-danger .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-danger .navbar-end .navbar-link,.navbar.is-danger .navbar-end>.navbar-item,.navbar.is-danger .navbar-start .navbar-link,.navbar.is-danger .navbar-start>.navbar-item{color:#fff}.navbar.is-danger .navbar-end .navbar-link.is-active,.navbar.is-danger .navbar-end .navbar-link:focus,.navbar.is-danger .navbar-end .navbar-link:hover,.navbar.is-danger .navbar-end>a.navbar-item.is-active,.navbar.is-danger .navbar-end>a.navbar-item:focus,.navbar.is-danger .navbar-end>a.navbar-item:hover,.navbar.is-danger .navbar-start .navbar-link.is-active,.navbar.is-danger .navbar-start .navbar-link:focus,.navbar.is-danger .navbar-start .navbar-link:hover,.navbar.is-danger .navbar-start>a.navbar-item.is-active,.navbar.is-danger .navbar-start>a.navbar-item:focus,.navbar.is-danger .navbar-start>a.navbar-item:hover{background-color:#ef2e55;color:#fff}.navbar.is-danger .navbar-end .navbar-link::after,.navbar.is-danger .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-danger .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-danger .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-danger .navbar-item.has-dropdown:hover .navbar-link{background-color:#ef2e55;color:#fff}.navbar.is-danger .navbar-dropdown a.navbar-item.is-active{background-color:#f14668;color:#fff}}.navbar>.container{align-items:stretch;display:flex;min-height:3.25rem;width:100%}.navbar.has-shadow{box-shadow:0 2px 0 0 #f5f5f5}.navbar.is-fixed-bottom,.navbar.is-fixed-top{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom{bottom:0}.navbar.is-fixed-bottom.has-shadow{box-shadow:0 -2px 0 0 
#f5f5f5}.navbar.is-fixed-top{top:0}body.has-navbar-fixed-top,html.has-navbar-fixed-top{padding-top:3.25rem}body.has-navbar-fixed-bottom,html.has-navbar-fixed-bottom{padding-bottom:3.25rem}.navbar-brand,.navbar-tabs{align-items:stretch;display:flex;flex-shrink:0;min-height:3.25rem}.navbar-brand a.navbar-item:focus,.navbar-brand a.navbar-item:hover{background-color:transparent}.navbar-tabs{-webkit-overflow-scrolling:touch;max-width:100vw;overflow-x:auto;overflow-y:hidden}.navbar-burger{color:#4a4a4a;cursor:pointer;display:block;height:3.25rem;position:relative;width:3.25rem;margin-left:auto}.navbar-burger span{background-color:currentColor;display:block;height:1px;left:calc(50% - 8px);position:absolute;transform-origin:center;transition-duration:86ms;transition-property:background-color,opacity,transform;transition-timing-function:ease-out;width:16px}.navbar-burger span:nth-child(1){top:calc(50% - 6px)}.navbar-burger span:nth-child(2){top:calc(50% - 1px)}.navbar-burger span:nth-child(3){top:calc(50% + 4px)}.navbar-burger:hover{background-color:rgba(0,0,0,.05)}.navbar-burger.is-active span:nth-child(1){transform:translateY(5px) rotate(45deg)}.navbar-burger.is-active span:nth-child(2){opacity:0}.navbar-burger.is-active span:nth-child(3){transform:translateY(-5px) rotate(-45deg)}.navbar-menu{display:none}.navbar-item,.navbar-link{color:#4a4a4a;display:block;line-height:1.5;padding:.5rem .75rem;position:relative}.navbar-item .icon:only-child,.navbar-link .icon:only-child{margin-left:-.25rem;margin-right:-.25rem}.navbar-link,a.navbar-item{cursor:pointer}.navbar-link.is-active,.navbar-link:focus,.navbar-link:focus-within,.navbar-link:hover,a.navbar-item.is-active,a.navbar-item:focus,a.navbar-item:focus-within,a.navbar-item:hover{background-color:#fafafa;color:#3273dc}.navbar-item{flex-grow:0;flex-shrink:0}.navbar-item img{max-height:1.75rem}.navbar-item.has-dropdown{padding:0}.navbar-item.is-expanded{flex-grow:1;flex-shrink:1}.navbar-item.is-tab{border-bottom:1px solid 
transparent;min-height:3.25rem;padding-bottom:calc(.5rem - 1px)}.navbar-item.is-tab:focus,.navbar-item.is-tab:hover{background-color:transparent;border-bottom-color:#3273dc}.navbar-item.is-tab.is-active{background-color:transparent;border-bottom-color:#3273dc;border-bottom-style:solid;border-bottom-width:3px;color:#3273dc;padding-bottom:calc(.5rem - 3px)}.navbar-content{flex-grow:1;flex-shrink:1}.navbar-link:not(.is-arrowless){padding-right:2.5em}.navbar-link:not(.is-arrowless)::after{border-color:#3273dc;margin-top:-.375em;right:1.125em}.navbar-dropdown{font-size:.875rem;padding-bottom:.5rem;padding-top:.5rem}.navbar-dropdown .navbar-item{padding-left:1.5rem;padding-right:1.5rem}.navbar-divider{background-color:#f5f5f5;border:none;display:none;height:2px;margin:.5rem 0}@media screen and (max-width:1023px){.navbar>.container{display:block}.navbar-brand .navbar-item,.navbar-tabs .navbar-item{align-items:center;display:flex}.navbar-link::after{display:none}.navbar-menu{background-color:#fff;box-shadow:0 8px 16px rgba(10,10,10,.1);padding:.5rem 0}.navbar-menu.is-active{display:block}.navbar.is-fixed-bottom-touch,.navbar.is-fixed-top-touch{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom-touch{bottom:0}.navbar.is-fixed-bottom-touch.has-shadow{box-shadow:0 -2px 3px rgba(10,10,10,.1)}.navbar.is-fixed-top-touch{top:0}.navbar.is-fixed-top .navbar-menu,.navbar.is-fixed-top-touch .navbar-menu{-webkit-overflow-scrolling:touch;max-height:calc(100vh - 3.25rem);overflow:auto}body.has-navbar-fixed-top-touch,html.has-navbar-fixed-top-touch{padding-top:3.25rem}body.has-navbar-fixed-bottom-touch,html.has-navbar-fixed-bottom-touch{padding-bottom:3.25rem}}@media screen and (min-width:1024px){.navbar,.navbar-end,.navbar-menu,.navbar-start{align-items:stretch;display:flex}.navbar{min-height:3.25rem}.navbar.is-spaced{padding:1rem 2rem}.navbar.is-spaced .navbar-end,.navbar.is-spaced .navbar-start{align-items:center}.navbar.is-spaced .navbar-link,.navbar.is-spaced 
a.navbar-item{border-radius:4px}.navbar.is-transparent .navbar-link.is-active,.navbar.is-transparent .navbar-link:focus,.navbar.is-transparent .navbar-link:hover,.navbar.is-transparent a.navbar-item.is-active,.navbar.is-transparent a.navbar-item:focus,.navbar.is-transparent a.navbar-item:hover{background-color:transparent!important}.navbar.is-transparent .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus-within .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:hover .navbar-link{background-color:transparent!important}.navbar.is-transparent .navbar-dropdown a.navbar-item:focus,.navbar.is-transparent .navbar-dropdown a.navbar-item:hover{background-color:#f5f5f5;color:#0a0a0a}.navbar.is-transparent .navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:#3273dc}.navbar-burger{display:none}.navbar-item,.navbar-link{align-items:center;display:flex}.navbar-item.has-dropdown{align-items:stretch}.navbar-item.has-dropdown-up .navbar-link::after{transform:rotate(135deg) translate(.25em,-.25em)}.navbar-item.has-dropdown-up .navbar-dropdown{border-bottom:2px solid #dbdbdb;border-radius:6px 6px 0 0;border-top:none;bottom:100%;box-shadow:0 -8px 8px rgba(10,10,10,.1);top:auto}.navbar-item.is-active .navbar-dropdown,.navbar-item.is-hoverable:focus .navbar-dropdown,.navbar-item.is-hoverable:focus-within .navbar-dropdown,.navbar-item.is-hoverable:hover .navbar-dropdown{display:block}.navbar-item.is-active .navbar-dropdown.is-boxed,.navbar-item.is-hoverable:focus .navbar-dropdown.is-boxed,.navbar-item.is-hoverable:focus-within .navbar-dropdown.is-boxed,.navbar-item.is-hoverable:hover .navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-item.is-active .navbar-dropdown,.navbar.is-spaced .navbar-item.is-hoverable:focus .navbar-dropdown,.navbar.is-spaced .navbar-item.is-hoverable:focus-within 
.navbar-dropdown,.navbar.is-spaced .navbar-item.is-hoverable:hover .navbar-dropdown{opacity:1;pointer-events:auto;transform:translateY(0)}.navbar-menu{flex-grow:1;flex-shrink:0}.navbar-start{justify-content:flex-start;margin-right:auto}.navbar-end{justify-content:flex-end;margin-left:auto}.navbar-dropdown{background-color:#fff;border-bottom-left-radius:6px;border-bottom-right-radius:6px;border-top:2px solid #dbdbdb;box-shadow:0 8px 8px rgba(10,10,10,.1);display:none;font-size:.875rem;left:0;min-width:100%;position:absolute;top:100%;z-index:20}.navbar-dropdown .navbar-item{padding:.375rem 1rem;white-space:nowrap}.navbar-dropdown a.navbar-item{padding-right:3rem}.navbar-dropdown a.navbar-item:focus,.navbar-dropdown a.navbar-item:hover{background-color:#f5f5f5;color:#0a0a0a}.navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:#3273dc}.navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-dropdown{border-radius:6px;border-top:none;box-shadow:0 8px 8px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);display:block;opacity:0;pointer-events:none;top:calc(100% + (-4px));transform:translateY(-5px);transition-duration:86ms;transition-property:opacity,transform}.navbar-dropdown.is-right{left:auto;right:0}.navbar-divider{display:block}.container>.navbar .navbar-brand,.navbar>.container .navbar-brand{margin-left:-.75rem}.container>.navbar .navbar-menu,.navbar>.container .navbar-menu{margin-right:-.75rem}.navbar.is-fixed-bottom-desktop,.navbar.is-fixed-top-desktop{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom-desktop{bottom:0}.navbar.is-fixed-bottom-desktop.has-shadow{box-shadow:0 -2px 3px 
rgba(10,10,10,.1)}.navbar.is-fixed-top-desktop{top:0}body.has-navbar-fixed-top-desktop,html.has-navbar-fixed-top-desktop{padding-top:3.25rem}body.has-navbar-fixed-bottom-desktop,html.has-navbar-fixed-bottom-desktop{padding-bottom:3.25rem}body.has-spaced-navbar-fixed-top,html.has-spaced-navbar-fixed-top{padding-top:5.25rem}body.has-spaced-navbar-fixed-bottom,html.has-spaced-navbar-fixed-bottom{padding-bottom:5.25rem}.navbar-link.is-active,a.navbar-item.is-active{color:#0a0a0a}.navbar-link.is-active:not(:focus):not(:hover),a.navbar-item.is-active:not(:focus):not(:hover){background-color:transparent}.navbar-item.has-dropdown.is-active .navbar-link,.navbar-item.has-dropdown:focus .navbar-link,.navbar-item.has-dropdown:hover .navbar-link{background-color:#fafafa}}.hero.is-fullheight-with-navbar{min-height:calc(100vh - 3.25rem)}.pagination{font-size:1rem;margin:-.25rem}.pagination.is-small{font-size:.75rem}.pagination.is-medium{font-size:1.25rem}.pagination.is-large{font-size:1.5rem}.pagination.is-rounded .pagination-next,.pagination.is-rounded .pagination-previous{padding-left:1em;padding-right:1em;border-radius:290486px}.pagination.is-rounded .pagination-link{border-radius:290486px}.pagination,.pagination-list{align-items:center;display:flex;justify-content:center;text-align:center}.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous{font-size:1em;justify-content:center;margin:.25rem;padding-left:.5em;padding-right:.5em;text-align:center}.pagination-link,.pagination-next,.pagination-previous{border-color:#dbdbdb;color:#363636;min-width:2.5em}.pagination-link:hover,.pagination-next:hover,.pagination-previous:hover{border-color:#b5b5b5;color:#363636}.pagination-link:focus,.pagination-next:focus,.pagination-previous:focus{border-color:#3273dc}.pagination-link:active,.pagination-next:active,.pagination-previous:active{box-shadow:inset 0 1px 2px 
rgba(10,10,10,.2)}.pagination-link[disabled],.pagination-next[disabled],.pagination-previous[disabled]{background-color:#dbdbdb;border-color:#dbdbdb;box-shadow:none;color:#7a7a7a;opacity:.5}.pagination-next,.pagination-previous{padding-left:.75em;padding-right:.75em;white-space:nowrap}.pagination-link.is-current{background-color:#3273dc;border-color:#3273dc;color:#fff}.pagination-ellipsis{color:#b5b5b5;pointer-events:none}.pagination-list{flex-wrap:wrap}@media screen and (max-width:768px){.pagination{flex-wrap:wrap}.pagination-next,.pagination-previous{flex-grow:1;flex-shrink:1}.pagination-list li{flex-grow:1;flex-shrink:1}}@media screen and (min-width:769px),print{.pagination-list{flex-grow:1;flex-shrink:1;justify-content:flex-start;order:1}.pagination-previous{order:2}.pagination-next{order:3}.pagination{justify-content:space-between}.pagination.is-centered .pagination-previous{order:1}.pagination.is-centered .pagination-list{justify-content:center;order:2}.pagination.is-centered .pagination-next{order:3}.pagination.is-right .pagination-previous{order:1}.pagination.is-right .pagination-next{order:2}.pagination.is-right .pagination-list{justify-content:flex-end;order:3}}.panel{border-radius:6px;box-shadow:0 .5em 1em -.125em rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.02);font-size:1rem}.panel:not(:last-child){margin-bottom:1.5rem}.panel.is-white .panel-heading{background-color:#fff;color:#0a0a0a}.panel.is-white .panel-tabs a.is-active{border-bottom-color:#fff}.panel.is-white .panel-block.is-active .panel-icon{color:#fff}.panel.is-black .panel-heading{background-color:#0a0a0a;color:#fff}.panel.is-black .panel-tabs a.is-active{border-bottom-color:#0a0a0a}.panel.is-black .panel-block.is-active .panel-icon{color:#0a0a0a}.panel.is-light .panel-heading{background-color:#f5f5f5;color:rgba(0,0,0,.7)}.panel.is-light .panel-tabs a.is-active{border-bottom-color:#f5f5f5}.panel.is-light .panel-block.is-active .panel-icon{color:#f5f5f5}.panel.is-dark 
.panel-heading{background-color:#363636;color:#fff}.panel.is-dark .panel-tabs a.is-active{border-bottom-color:#363636}.panel.is-dark .panel-block.is-active .panel-icon{color:#363636}.panel.is-primary .panel-heading{background-color:#00d1b2;color:#fff}.panel.is-primary .panel-tabs a.is-active{border-bottom-color:#00d1b2}.panel.is-primary .panel-block.is-active .panel-icon{color:#00d1b2}.panel.is-link .panel-heading{background-color:#3273dc;color:#fff}.panel.is-link .panel-tabs a.is-active{border-bottom-color:#3273dc}.panel.is-link .panel-block.is-active .panel-icon{color:#3273dc}.panel.is-info .panel-heading{background-color:#3298dc;color:#fff}.panel.is-info .panel-tabs a.is-active{border-bottom-color:#3298dc}.panel.is-info .panel-block.is-active .panel-icon{color:#3298dc}.panel.is-success .panel-heading{background-color:#48c774;color:#fff}.panel.is-success .panel-tabs a.is-active{border-bottom-color:#48c774}.panel.is-success .panel-block.is-active .panel-icon{color:#48c774}.panel.is-warning .panel-heading{background-color:#ffdd57;color:rgba(0,0,0,.7)}.panel.is-warning .panel-tabs a.is-active{border-bottom-color:#ffdd57}.panel.is-warning .panel-block.is-active .panel-icon{color:#ffdd57}.panel.is-danger .panel-heading{background-color:#f14668;color:#fff}.panel.is-danger .panel-tabs a.is-active{border-bottom-color:#f14668}.panel.is-danger .panel-block.is-active .panel-icon{color:#f14668}.panel-block:not(:last-child),.panel-tabs:not(:last-child){border-bottom:1px solid #ededed}.panel-heading{background-color:#ededed;border-radius:6px 6px 0 0;color:#363636;font-size:1.25em;font-weight:700;line-height:1.25;padding:.75em 1em}.panel-tabs{align-items:flex-end;display:flex;font-size:.875em;justify-content:center}.panel-tabs a{border-bottom:1px solid #dbdbdb;margin-bottom:-1px;padding:.5em}.panel-tabs a.is-active{border-bottom-color:#4a4a4a;color:#363636}.panel-list a{color:#4a4a4a}.panel-list 
a:hover{color:#3273dc}.panel-block{align-items:center;color:#363636;display:flex;justify-content:flex-start;padding:.5em .75em}.panel-block input[type=checkbox]{margin-right:.75em}.panel-block>.control{flex-grow:1;flex-shrink:1;width:100%}.panel-block.is-wrapped{flex-wrap:wrap}.panel-block.is-active{border-left-color:#3273dc;color:#363636}.panel-block.is-active .panel-icon{color:#3273dc}.panel-block:last-child{border-bottom-left-radius:6px;border-bottom-right-radius:6px}a.panel-block,label.panel-block{cursor:pointer}a.panel-block:hover,label.panel-block:hover{background-color:#f5f5f5}.panel-icon{display:inline-block;font-size:14px;height:1em;line-height:1em;text-align:center;vertical-align:top;width:1em;color:#7a7a7a;margin-right:.75em}.panel-icon .fa{font-size:inherit;line-height:inherit}.tabs{-webkit-overflow-scrolling:touch;align-items:stretch;display:flex;font-size:1rem;justify-content:space-between;overflow:hidden;overflow-x:auto;white-space:nowrap}.tabs a{align-items:center;border-bottom-color:#dbdbdb;border-bottom-style:solid;border-bottom-width:1px;color:#4a4a4a;display:flex;justify-content:center;margin-bottom:-1px;padding:.5em 1em;vertical-align:top}.tabs a:hover{border-bottom-color:#363636;color:#363636}.tabs li{display:block}.tabs li.is-active a{border-bottom-color:#3273dc;color:#3273dc}.tabs ul{align-items:center;border-bottom-color:#dbdbdb;border-bottom-style:solid;border-bottom-width:1px;display:flex;flex-grow:1;flex-shrink:0;justify-content:flex-start}.tabs ul.is-left{padding-right:.75em}.tabs ul.is-center{flex:none;justify-content:center;padding-left:.75em;padding-right:.75em}.tabs ul.is-right{justify-content:flex-end;padding-left:.75em}.tabs .icon:first-child{margin-right:.5em}.tabs .icon:last-child{margin-left:.5em}.tabs.is-centered ul{justify-content:center}.tabs.is-right ul{justify-content:flex-end}.tabs.is-boxed a{border:1px solid transparent;border-radius:4px 4px 0 0}.tabs.is-boxed 
a:hover{background-color:#f5f5f5;border-bottom-color:#dbdbdb}.tabs.is-boxed li.is-active a{background-color:#fff;border-color:#dbdbdb;border-bottom-color:transparent!important}.tabs.is-fullwidth li{flex-grow:1;flex-shrink:0}.tabs.is-toggle a{border-color:#dbdbdb;border-style:solid;border-width:1px;margin-bottom:0;position:relative}.tabs.is-toggle a:hover{background-color:#f5f5f5;border-color:#b5b5b5;z-index:2}.tabs.is-toggle li+li{margin-left:-1px}.tabs.is-toggle li:first-child a{border-top-left-radius:4px;border-bottom-left-radius:4px}.tabs.is-toggle li:last-child a{border-top-right-radius:4px;border-bottom-right-radius:4px}.tabs.is-toggle li.is-active a{background-color:#3273dc;border-color:#3273dc;color:#fff;z-index:1}.tabs.is-toggle ul{border-bottom:none}.tabs.is-toggle.is-toggle-rounded li:first-child a{border-bottom-left-radius:290486px;border-top-left-radius:290486px;padding-left:1.25em}.tabs.is-toggle.is-toggle-rounded li:last-child a{border-bottom-right-radius:290486px;border-top-right-radius:290486px;padding-right:1.25em}.tabs.is-small{font-size:.75rem}.tabs.is-medium{font-size:1.25rem}.tabs.is-large{font-size:1.5rem}.column{display:block;flex-basis:0;flex-grow:1;flex-shrink:1;padding:.75rem}.columns.is-mobile>.column.is-narrow{flex:none}.columns.is-mobile>.column.is-full{flex:none;width:100%}.columns.is-mobile>.column.is-three-quarters{flex:none;width:75%}.columns.is-mobile>.column.is-two-thirds{flex:none;width:66.6666%}.columns.is-mobile>.column.is-half{flex:none;width:50%}.columns.is-mobile>.column.is-one-third{flex:none;width:33.3333%}.columns.is-mobile>.column.is-one-quarter{flex:none;width:25%}.columns.is-mobile>.column.is-one-fifth{flex:none;width:20%}.columns.is-mobile>.column.is-two-fifths{flex:none;width:40%}.columns.is-mobile>.column.is-three-fifths{flex:none;width:60%}.columns.is-mobile>.column.is-four-fifths{flex:none;width:80%}.columns.is-mobile>.column.is-offset-three-quarters{margin-left:75%}.columns.is-mobile>.column.is-offset-two-thirds{m
argin-left:66.6666%}.columns.is-mobile>.column.is-offset-half{margin-left:50%}.columns.is-mobile>.column.is-offset-one-third{margin-left:33.3333%}.columns.is-mobile>.column.is-offset-one-quarter{margin-left:25%}.columns.is-mobile>.column.is-offset-one-fifth{margin-left:20%}.columns.is-mobile>.column.is-offset-two-fifths{margin-left:40%}.columns.is-mobile>.column.is-offset-three-fifths{margin-left:60%}.columns.is-mobile>.column.is-offset-four-fifths{margin-left:80%}.columns.is-mobile>.column.is-0{flex:none;width:0%}.columns.is-mobile>.column.is-offset-0{margin-left:0}.columns.is-mobile>.column.is-1{flex:none;width:8.33333%}.columns.is-mobile>.column.is-offset-1{margin-left:8.33333%}.columns.is-mobile>.column.is-2{flex:none;width:16.66667%}.columns.is-mobile>.column.is-offset-2{margin-left:16.66667%}.columns.is-mobile>.column.is-3{flex:none;width:25%}.columns.is-mobile>.column.is-offset-3{margin-left:25%}.columns.is-mobile>.column.is-4{flex:none;width:33.33333%}.columns.is-mobile>.column.is-offset-4{margin-left:33.33333%}.columns.is-mobile>.column.is-5{flex:none;width:41.66667%}.columns.is-mobile>.column.is-offset-5{margin-left:41.66667%}.columns.is-mobile>.column.is-6{flex:none;width:50%}.columns.is-mobile>.column.is-offset-6{margin-left:50%}.columns.is-mobile>.column.is-7{flex:none;width:58.33333%}.columns.is-mobile>.column.is-offset-7{margin-left:58.33333%}.columns.is-mobile>.column.is-8{flex:none;width:66.66667%}.columns.is-mobile>.column.is-offset-8{margin-left:66.66667%}.columns.is-mobile>.column.is-9{flex:none;width:75%}.columns.is-mobile>.column.is-offset-9{margin-left:75%}.columns.is-mobile>.column.is-10{flex:none;width:83.33333%}.columns.is-mobile>.column.is-offset-10{margin-left:83.33333%}.columns.is-mobile>.column.is-11{flex:none;width:91.66667%}.columns.is-mobile>.column.is-offset-11{margin-left:91.66667%}.columns.is-mobile>.column.is-12{flex:none;width:100%}.columns.is-mobile>.column.is-offset-12{margin-left:100%}@media screen and 
(max-width:768px){.column.is-narrow-mobile{flex:none}.column.is-full-mobile{flex:none;width:100%}.column.is-three-quarters-mobile{flex:none;width:75%}.column.is-two-thirds-mobile{flex:none;width:66.6666%}.column.is-half-mobile{flex:none;width:50%}.column.is-one-third-mobile{flex:none;width:33.3333%}.column.is-one-quarter-mobile{flex:none;width:25%}.column.is-one-fifth-mobile{flex:none;width:20%}.column.is-two-fifths-mobile{flex:none;width:40%}.column.is-three-fifths-mobile{flex:none;width:60%}.column.is-four-fifths-mobile{flex:none;width:80%}.column.is-offset-three-quarters-mobile{margin-left:75%}.column.is-offset-two-thirds-mobile{margin-left:66.6666%}.column.is-offset-half-mobile{margin-left:50%}.column.is-offset-one-third-mobile{margin-left:33.3333%}.column.is-offset-one-quarter-mobile{margin-left:25%}.column.is-offset-one-fifth-mobile{margin-left:20%}.column.is-offset-two-fifths-mobile{margin-left:40%}.column.is-offset-three-fifths-mobile{margin-left:60%}.column.is-offset-four-fifths-mobile{margin-left:80%}.column.is-0-mobile{flex:none;width:0%}.column.is-offset-0-mobile{margin-left:0}.column.is-1-mobile{flex:none;width:8.33333%}.column.is-offset-1-mobile{margin-left:8.33333%}.column.is-2-mobile{flex:none;width:16.66667%}.column.is-offset-2-mobile{margin-left:16.66667%}.column.is-3-mobile{flex:none;width:25%}.column.is-offset-3-mobile{margin-left:25%}.column.is-4-mobile{flex:none;width:33.33333%}.column.is-offset-4-mobile{margin-left:33.33333%}.column.is-5-mobile{flex:none;width:41.66667%}.column.is-offset-5-mobile{margin-left:41.66667%}.column.is-6-mobile{flex:none;width:50%}.column.is-offset-6-mobile{margin-left:50%}.column.is-7-mobile{flex:none;width:58.33333%}.column.is-offset-7-mobile{margin-left:58.33333%}.column.is-8-mobile{flex:none;width:66.66667%}.column.is-offset-8-mobile{margin-left:66.66667%}.column.is-9-mobile{flex:none;width:75%}.column.is-offset-9-mobile{margin-left:75%}.column.is-10-mobile{flex:none;width:83.33333%}.column.is-offset-10-mobile{ma
rgin-left:83.33333%}.column.is-11-mobile{flex:none;width:91.66667%}.column.is-offset-11-mobile{margin-left:91.66667%}.column.is-12-mobile{flex:none;width:100%}.column.is-offset-12-mobile{margin-left:100%}}@media screen and (min-width:769px),print{.column.is-narrow,.column.is-narrow-tablet{flex:none}.column.is-full,.column.is-full-tablet{flex:none;width:100%}.column.is-three-quarters,.column.is-three-quarters-tablet{flex:none;width:75%}.column.is-two-thirds,.column.is-two-thirds-tablet{flex:none;width:66.6666%}.column.is-half,.column.is-half-tablet{flex:none;width:50%}.column.is-one-third,.column.is-one-third-tablet{flex:none;width:33.3333%}.column.is-one-quarter,.column.is-one-quarter-tablet{flex:none;width:25%}.column.is-one-fifth,.column.is-one-fifth-tablet{flex:none;width:20%}.column.is-two-fifths,.column.is-two-fifths-tablet{flex:none;width:40%}.column.is-three-fifths,.column.is-three-fifths-tablet{flex:none;width:60%}.column.is-four-fifths,.column.is-four-fifths-tablet{flex:none;width:80%}.column.is-offset-three-quarters,.column.is-offset-three-quarters-tablet{margin-left:75%}.column.is-offset-two-thirds,.column.is-offset-two-thirds-tablet{margin-left:66.6666%}.column.is-offset-half,.column.is-offset-half-tablet{margin-left:50%}.column.is-offset-one-third,.column.is-offset-one-third-tablet{margin-left:33.3333%}.column.is-offset-one-quarter,.column.is-offset-one-quarter-tablet{margin-left:25%}.column.is-offset-one-fifth,.column.is-offset-one-fifth-tablet{margin-left:20%}.column.is-offset-two-fifths,.column.is-offset-two-fifths-tablet{margin-left:40%}.column.is-offset-three-fifths,.column.is-offset-three-fifths-tablet{margin-left:60%}.column.is-offset-four-fifths,.column.is-offset-four-fifths-tablet{margin-left:80%}.column.is-0,.column.is-0-tablet{flex:none;width:0%}.column.is-offset-0,.column.is-offset-0-tablet{margin-left:0}.column.is-1,.column.is-1-tablet{flex:none;width:8.33333%}.column.is-offset-1,.column.is-offset-1-tablet{margin-left:8.33333%}.column.is-2,
.column.is-2-tablet{flex:none;width:16.66667%}.column.is-offset-2,.column.is-offset-2-tablet{margin-left:16.66667%}.column.is-3,.column.is-3-tablet{flex:none;width:25%}.column.is-offset-3,.column.is-offset-3-tablet{margin-left:25%}.column.is-4,.column.is-4-tablet{flex:none;width:33.33333%}.column.is-offset-4,.column.is-offset-4-tablet{margin-left:33.33333%}.column.is-5,.column.is-5-tablet{flex:none;width:41.66667%}.column.is-offset-5,.column.is-offset-5-tablet{margin-left:41.66667%}.column.is-6,.column.is-6-tablet{flex:none;width:50%}.column.is-offset-6,.column.is-offset-6-tablet{margin-left:50%}.column.is-7,.column.is-7-tablet{flex:none;width:58.33333%}.column.is-offset-7,.column.is-offset-7-tablet{margin-left:58.33333%}.column.is-8,.column.is-8-tablet{flex:none;width:66.66667%}.column.is-offset-8,.column.is-offset-8-tablet{margin-left:66.66667%}.column.is-9,.column.is-9-tablet{flex:none;width:75%}.column.is-offset-9,.column.is-offset-9-tablet{margin-left:75%}.column.is-10,.column.is-10-tablet{flex:none;width:83.33333%}.column.is-offset-10,.column.is-offset-10-tablet{margin-left:83.33333%}.column.is-11,.column.is-11-tablet{flex:none;width:91.66667%}.column.is-offset-11,.column.is-offset-11-tablet{margin-left:91.66667%}.column.is-12,.column.is-12-tablet{flex:none;width:100%}.column.is-offset-12,.column.is-offset-12-tablet{margin-left:100%}}@media screen and 
(max-width:1023px){.column.is-narrow-touch{flex:none}.column.is-full-touch{flex:none;width:100%}.column.is-three-quarters-touch{flex:none;width:75%}.column.is-two-thirds-touch{flex:none;width:66.6666%}.column.is-half-touch{flex:none;width:50%}.column.is-one-third-touch{flex:none;width:33.3333%}.column.is-one-quarter-touch{flex:none;width:25%}.column.is-one-fifth-touch{flex:none;width:20%}.column.is-two-fifths-touch{flex:none;width:40%}.column.is-three-fifths-touch{flex:none;width:60%}.column.is-four-fifths-touch{flex:none;width:80%}.column.is-offset-three-quarters-touch{margin-left:75%}.column.is-offset-two-thirds-touch{margin-left:66.6666%}.column.is-offset-half-touch{margin-left:50%}.column.is-offset-one-third-touch{margin-left:33.3333%}.column.is-offset-one-quarter-touch{margin-left:25%}.column.is-offset-one-fifth-touch{margin-left:20%}.column.is-offset-two-fifths-touch{margin-left:40%}.column.is-offset-three-fifths-touch{margin-left:60%}.column.is-offset-four-fifths-touch{margin-left:80%}.column.is-0-touch{flex:none;width:0%}.column.is-offset-0-touch{margin-left:0}.column.is-1-touch{flex:none;width:8.33333%}.column.is-offset-1-touch{margin-left:8.33333%}.column.is-2-touch{flex:none;width:16.66667%}.column.is-offset-2-touch{margin-left:16.66667%}.column.is-3-touch{flex:none;width:25%}.column.is-offset-3-touch{margin-left:25%}.column.is-4-touch{flex:none;width:33.33333%}.column.is-offset-4-touch{margin-left:33.33333%}.column.is-5-touch{flex:none;width:41.66667%}.column.is-offset-5-touch{margin-left:41.66667%}.column.is-6-touch{flex:none;width:50%}.column.is-offset-6-touch{margin-left:50%}.column.is-7-touch{flex:none;width:58.33333%}.column.is-offset-7-touch{margin-left:58.33333%}.column.is-8-touch{flex:none;width:66.66667%}.column.is-offset-8-touch{margin-left:66.66667%}.column.is-9-touch{flex:none;width:75%}.column.is-offset-9-touch{margin-left:75%}.column.is-10-touch{flex:none;width:83.33333%}.column.is-offset-10-touch{margin-left:83.33333%}.column.is-11-touch{f
lex:none;width:91.66667%}.column.is-offset-11-touch{margin-left:91.66667%}.column.is-12-touch{flex:none;width:100%}.column.is-offset-12-touch{margin-left:100%}}@media screen and (min-width:1024px){.column.is-narrow-desktop{flex:none}.column.is-full-desktop{flex:none;width:100%}.column.is-three-quarters-desktop{flex:none;width:75%}.column.is-two-thirds-desktop{flex:none;width:66.6666%}.column.is-half-desktop{flex:none;width:50%}.column.is-one-third-desktop{flex:none;width:33.3333%}.column.is-one-quarter-desktop{flex:none;width:25%}.column.is-one-fifth-desktop{flex:none;width:20%}.column.is-two-fifths-desktop{flex:none;width:40%}.column.is-three-fifths-desktop{flex:none;width:60%}.column.is-four-fifths-desktop{flex:none;width:80%}.column.is-offset-three-quarters-desktop{margin-left:75%}.column.is-offset-two-thirds-desktop{margin-left:66.6666%}.column.is-offset-half-desktop{margin-left:50%}.column.is-offset-one-third-desktop{margin-left:33.3333%}.column.is-offset-one-quarter-desktop{margin-left:25%}.column.is-offset-one-fifth-desktop{margin-left:20%}.column.is-offset-two-fifths-desktop{margin-left:40%}.column.is-offset-three-fifths-desktop{margin-left:60%}.column.is-offset-four-fifths-desktop{margin-left:80%}.column.is-0-desktop{flex:none;width:0%}.column.is-offset-0-desktop{margin-left:0}.column.is-1-desktop{flex:none;width:8.33333%}.column.is-offset-1-desktop{margin-left:8.33333%}.column.is-2-desktop{flex:none;width:16.66667%}.column.is-offset-2-desktop{margin-left:16.66667%}.column.is-3-desktop{flex:none;width:25%}.column.is-offset-3-desktop{margin-left:25%}.column.is-4-desktop{flex:none;width:33.33333%}.column.is-offset-4-desktop{margin-left:33.33333%}.column.is-5-desktop{flex:none;width:41.66667%}.column.is-offset-5-desktop{margin-left:41.66667%}.column.is-6-desktop{flex:none;width:50%}.column.is-offset-6-desktop{margin-left:50%}.column.is-7-desktop{flex:none;width:58.33333%}.column.is-offset-7-desktop{margin-left:58.33333%}.column.is-8-desktop{flex:none;width:66.
66667%}.column.is-offset-8-desktop{margin-left:66.66667%}.column.is-9-desktop{flex:none;width:75%}.column.is-offset-9-desktop{margin-left:75%}.column.is-10-desktop{flex:none;width:83.33333%}.column.is-offset-10-desktop{margin-left:83.33333%}.column.is-11-desktop{flex:none;width:91.66667%}.column.is-offset-11-desktop{margin-left:91.66667%}.column.is-12-desktop{flex:none;width:100%}.column.is-offset-12-desktop{margin-left:100%}}@media screen and (min-width:1216px){.column.is-narrow-widescreen{flex:none}.column.is-full-widescreen{flex:none;width:100%}.column.is-three-quarters-widescreen{flex:none;width:75%}.column.is-two-thirds-widescreen{flex:none;width:66.6666%}.column.is-half-widescreen{flex:none;width:50%}.column.is-one-third-widescreen{flex:none;width:33.3333%}.column.is-one-quarter-widescreen{flex:none;width:25%}.column.is-one-fifth-widescreen{flex:none;width:20%}.column.is-two-fifths-widescreen{flex:none;width:40%}.column.is-three-fifths-widescreen{flex:none;width:60%}.column.is-four-fifths-widescreen{flex:none;width:80%}.column.is-offset-three-quarters-widescreen{margin-left:75%}.column.is-offset-two-thirds-widescreen{margin-left:66.6666%}.column.is-offset-half-widescreen{margin-left:50%}.column.is-offset-one-third-widescreen{margin-left:33.3333%}.column.is-offset-one-quarter-widescreen{margin-left:25%}.column.is-offset-one-fifth-widescreen{margin-left:20%}.column.is-offset-two-fifths-widescreen{margin-left:40%}.column.is-offset-three-fifths-widescreen{margin-left:60%}.column.is-offset-four-fifths-widescreen{margin-left:80%}.column.is-0-widescreen{flex:none;width:0%}.column.is-offset-0-widescreen{margin-left:0}.column.is-1-widescreen{flex:none;width:8.33333%}.column.is-offset-1-widescreen{margin-left:8.33333%}.column.is-2-widescreen{flex:none;width:16.66667%}.column.is-offset-2-widescreen{margin-left:16.66667%}.column.is-3-widescreen{flex:none;width:25%}.column.is-offset-3-widescreen{margin-left:25%}.column.is-4-widescreen{flex:none;width:33.33333%}.column.is-o
ffset-4-widescreen{margin-left:33.33333%}.column.is-5-widescreen{flex:none;width:41.66667%}.column.is-offset-5-widescreen{margin-left:41.66667%}.column.is-6-widescreen{flex:none;width:50%}.column.is-offset-6-widescreen{margin-left:50%}.column.is-7-widescreen{flex:none;width:58.33333%}.column.is-offset-7-widescreen{margin-left:58.33333%}.column.is-8-widescreen{flex:none;width:66.66667%}.column.is-offset-8-widescreen{margin-left:66.66667%}.column.is-9-widescreen{flex:none;width:75%}.column.is-offset-9-widescreen{margin-left:75%}.column.is-10-widescreen{flex:none;width:83.33333%}.column.is-offset-10-widescreen{margin-left:83.33333%}.column.is-11-widescreen{flex:none;width:91.66667%}.column.is-offset-11-widescreen{margin-left:91.66667%}.column.is-12-widescreen{flex:none;width:100%}.column.is-offset-12-widescreen{margin-left:100%}}@media screen and (min-width:1408px){.column.is-narrow-fullhd{flex:none}.column.is-full-fullhd{flex:none;width:100%}.column.is-three-quarters-fullhd{flex:none;width:75%}.column.is-two-thirds-fullhd{flex:none;width:66.6666%}.column.is-half-fullhd{flex:none;width:50%}.column.is-one-third-fullhd{flex:none;width:33.3333%}.column.is-one-quarter-fullhd{flex:none;width:25%}.column.is-one-fifth-fullhd{flex:none;width:20%}.column.is-two-fifths-fullhd{flex:none;width:40%}.column.is-three-fifths-fullhd{flex:none;width:60%}.column.is-four-fifths-fullhd{flex:none;width:80%}.column.is-offset-three-quarters-fullhd{margin-left:75%}.column.is-offset-two-thirds-fullhd{margin-left:66.6666%}.column.is-offset-half-fullhd{margin-left:50%}.column.is-offset-one-third-fullhd{margin-left:33.3333%}.column.is-offset-one-quarter-fullhd{margin-left:25%}.column.is-offset-one-fifth-fullhd{margin-left:20%}.column.is-offset-two-fifths-fullhd{margin-left:40%}.column.is-offset-three-fifths-fullhd{margin-left:60%}.column.is-offset-four-fifths-fullhd{margin-left:80%}.column.is-0-fullhd{flex:none;width:0%}.column.is-offset-0-fullhd{margin-left:0}.column.is-1-fullhd{flex:none;width:8
.33333%}.column.is-offset-1-fullhd{margin-left:8.33333%}.column.is-2-fullhd{flex:none;width:16.66667%}.column.is-offset-2-fullhd{margin-left:16.66667%}.column.is-3-fullhd{flex:none;width:25%}.column.is-offset-3-fullhd{margin-left:25%}.column.is-4-fullhd{flex:none;width:33.33333%}.column.is-offset-4-fullhd{margin-left:33.33333%}.column.is-5-fullhd{flex:none;width:41.66667%}.column.is-offset-5-fullhd{margin-left:41.66667%}.column.is-6-fullhd{flex:none;width:50%}.column.is-offset-6-fullhd{margin-left:50%}.column.is-7-fullhd{flex:none;width:58.33333%}.column.is-offset-7-fullhd{margin-left:58.33333%}.column.is-8-fullhd{flex:none;width:66.66667%}.column.is-offset-8-fullhd{margin-left:66.66667%}.column.is-9-fullhd{flex:none;width:75%}.column.is-offset-9-fullhd{margin-left:75%}.column.is-10-fullhd{flex:none;width:83.33333%}.column.is-offset-10-fullhd{margin-left:83.33333%}.column.is-11-fullhd{flex:none;width:91.66667%}.column.is-offset-11-fullhd{margin-left:91.66667%}.column.is-12-fullhd{flex:none;width:100%}.column.is-offset-12-fullhd{margin-left:100%}}.columns{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}.columns:last-child{margin-bottom:-.75rem}.columns:not(:last-child){margin-bottom:calc(1.5rem - .75rem)}.columns.is-centered{justify-content:center}.columns.is-gapless{margin-left:0;margin-right:0;margin-top:0}.columns.is-gapless>.column{margin:0;padding:0!important}.columns.is-gapless:not(:last-child){margin-bottom:1.5rem}.columns.is-gapless:last-child{margin-bottom:0}.columns.is-mobile{display:flex}.columns.is-multiline{flex-wrap:wrap}.columns.is-vcentered{align-items:center}@media screen and (min-width:769px),print{.columns:not(.is-desktop){display:flex}}@media screen and (min-width:1024px){.columns.is-desktop{display:flex}}.columns.is-variable{--columnGap:0.75rem;margin-left:calc(-1 * var(--columnGap));margin-right:calc(-1 * var(--columnGap))}.columns.is-variable 
.column{padding-left:var(--columnGap);padding-right:var(--columnGap)}.columns.is-variable.is-0{--columnGap:0rem}@media screen and (max-width:768px){.columns.is-variable.is-0-mobile{--columnGap:0rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-0-tablet{--columnGap:0rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-0-tablet-only{--columnGap:0rem}}@media screen and (max-width:1023px){.columns.is-variable.is-0-touch{--columnGap:0rem}}@media screen and (min-width:1024px){.columns.is-variable.is-0-desktop{--columnGap:0rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-0-desktop-only{--columnGap:0rem}}@media screen and (min-width:1216px){.columns.is-variable.is-0-widescreen{--columnGap:0rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-0-widescreen-only{--columnGap:0rem}}@media screen and (min-width:1408px){.columns.is-variable.is-0-fullhd{--columnGap:0rem}}.columns.is-variable.is-1{--columnGap:0.25rem}@media screen and (max-width:768px){.columns.is-variable.is-1-mobile{--columnGap:0.25rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-1-tablet{--columnGap:0.25rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-1-tablet-only{--columnGap:0.25rem}}@media screen and (max-width:1023px){.columns.is-variable.is-1-touch{--columnGap:0.25rem}}@media screen and (min-width:1024px){.columns.is-variable.is-1-desktop{--columnGap:0.25rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-1-desktop-only{--columnGap:0.25rem}}@media screen and (min-width:1216px){.columns.is-variable.is-1-widescreen{--columnGap:0.25rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-1-widescreen-only{--columnGap:0.25rem}}@media screen and 
(min-width:1408px){.columns.is-variable.is-1-fullhd{--columnGap:0.25rem}}.columns.is-variable.is-2{--columnGap:0.5rem}@media screen and (max-width:768px){.columns.is-variable.is-2-mobile{--columnGap:0.5rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-2-tablet{--columnGap:0.5rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-2-tablet-only{--columnGap:0.5rem}}@media screen and (max-width:1023px){.columns.is-variable.is-2-touch{--columnGap:0.5rem}}@media screen and (min-width:1024px){.columns.is-variable.is-2-desktop{--columnGap:0.5rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-2-desktop-only{--columnGap:0.5rem}}@media screen and (min-width:1216px){.columns.is-variable.is-2-widescreen{--columnGap:0.5rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-2-widescreen-only{--columnGap:0.5rem}}@media screen and (min-width:1408px){.columns.is-variable.is-2-fullhd{--columnGap:0.5rem}}.columns.is-variable.is-3{--columnGap:0.75rem}@media screen and (max-width:768px){.columns.is-variable.is-3-mobile{--columnGap:0.75rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-3-tablet{--columnGap:0.75rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-3-tablet-only{--columnGap:0.75rem}}@media screen and (max-width:1023px){.columns.is-variable.is-3-touch{--columnGap:0.75rem}}@media screen and (min-width:1024px){.columns.is-variable.is-3-desktop{--columnGap:0.75rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-3-desktop-only{--columnGap:0.75rem}}@media screen and (min-width:1216px){.columns.is-variable.is-3-widescreen{--columnGap:0.75rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-3-widescreen-only{--columnGap:0.75rem}}@media screen and 
(min-width:1408px){.columns.is-variable.is-3-fullhd{--columnGap:0.75rem}}.columns.is-variable.is-4{--columnGap:1rem}@media screen and (max-width:768px){.columns.is-variable.is-4-mobile{--columnGap:1rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-4-tablet{--columnGap:1rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-4-tablet-only{--columnGap:1rem}}@media screen and (max-width:1023px){.columns.is-variable.is-4-touch{--columnGap:1rem}}@media screen and (min-width:1024px){.columns.is-variable.is-4-desktop{--columnGap:1rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-4-desktop-only{--columnGap:1rem}}@media screen and (min-width:1216px){.columns.is-variable.is-4-widescreen{--columnGap:1rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-4-widescreen-only{--columnGap:1rem}}@media screen and (min-width:1408px){.columns.is-variable.is-4-fullhd{--columnGap:1rem}}.columns.is-variable.is-5{--columnGap:1.25rem}@media screen and (max-width:768px){.columns.is-variable.is-5-mobile{--columnGap:1.25rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-5-tablet{--columnGap:1.25rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-5-tablet-only{--columnGap:1.25rem}}@media screen and (max-width:1023px){.columns.is-variable.is-5-touch{--columnGap:1.25rem}}@media screen and (min-width:1024px){.columns.is-variable.is-5-desktop{--columnGap:1.25rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-5-desktop-only{--columnGap:1.25rem}}@media screen and (min-width:1216px){.columns.is-variable.is-5-widescreen{--columnGap:1.25rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-5-widescreen-only{--columnGap:1.25rem}}@media screen and 
(min-width:1408px){.columns.is-variable.is-5-fullhd{--columnGap:1.25rem}}.columns.is-variable.is-6{--columnGap:1.5rem}@media screen and (max-width:768px){.columns.is-variable.is-6-mobile{--columnGap:1.5rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-6-tablet{--columnGap:1.5rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-6-tablet-only{--columnGap:1.5rem}}@media screen and (max-width:1023px){.columns.is-variable.is-6-touch{--columnGap:1.5rem}}@media screen and (min-width:1024px){.columns.is-variable.is-6-desktop{--columnGap:1.5rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-6-desktop-only{--columnGap:1.5rem}}@media screen and (min-width:1216px){.columns.is-variable.is-6-widescreen{--columnGap:1.5rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-6-widescreen-only{--columnGap:1.5rem}}@media screen and (min-width:1408px){.columns.is-variable.is-6-fullhd{--columnGap:1.5rem}}.columns.is-variable.is-7{--columnGap:1.75rem}@media screen and (max-width:768px){.columns.is-variable.is-7-mobile{--columnGap:1.75rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-7-tablet{--columnGap:1.75rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-7-tablet-only{--columnGap:1.75rem}}@media screen and (max-width:1023px){.columns.is-variable.is-7-touch{--columnGap:1.75rem}}@media screen and (min-width:1024px){.columns.is-variable.is-7-desktop{--columnGap:1.75rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-7-desktop-only{--columnGap:1.75rem}}@media screen and (min-width:1216px){.columns.is-variable.is-7-widescreen{--columnGap:1.75rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-7-widescreen-only{--columnGap:1.75rem}}@media screen and 
(min-width:1408px){.columns.is-variable.is-7-fullhd{--columnGap:1.75rem}}.columns.is-variable.is-8{--columnGap:2rem}@media screen and (max-width:768px){.columns.is-variable.is-8-mobile{--columnGap:2rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-8-tablet{--columnGap:2rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-8-tablet-only{--columnGap:2rem}}@media screen and (max-width:1023px){.columns.is-variable.is-8-touch{--columnGap:2rem}}@media screen and (min-width:1024px){.columns.is-variable.is-8-desktop{--columnGap:2rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-8-desktop-only{--columnGap:2rem}}@media screen and (min-width:1216px){.columns.is-variable.is-8-widescreen{--columnGap:2rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-8-widescreen-only{--columnGap:2rem}}@media screen and (min-width:1408px){.columns.is-variable.is-8-fullhd{--columnGap:2rem}}.tile{align-items:stretch;display:block;flex-basis:0;flex-grow:1;flex-shrink:1;min-height:-webkit-min-content;min-height:-moz-min-content;min-height:min-content}.tile.is-ancestor{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}.tile.is-ancestor:last-child{margin-bottom:-.75rem}.tile.is-ancestor:not(:last-child){margin-bottom:.75rem}.tile.is-child{margin:0!important}.tile.is-parent{padding:.75rem}.tile.is-vertical{flex-direction:column}.tile.is-vertical>.tile.is-child:not(:last-child){margin-bottom:1.5rem!important}@media screen and 
(min-width:769px),print{.tile:not(.is-child){display:flex}.tile.is-1{flex:none;width:8.33333%}.tile.is-2{flex:none;width:16.66667%}.tile.is-3{flex:none;width:25%}.tile.is-4{flex:none;width:33.33333%}.tile.is-5{flex:none;width:41.66667%}.tile.is-6{flex:none;width:50%}.tile.is-7{flex:none;width:58.33333%}.tile.is-8{flex:none;width:66.66667%}.tile.is-9{flex:none;width:75%}.tile.is-10{flex:none;width:83.33333%}.tile.is-11{flex:none;width:91.66667%}.tile.is-12{flex:none;width:100%}}.has-text-white{color:#fff!important}a.has-text-white:focus,a.has-text-white:hover{color:#e6e6e6!important}.has-background-white{background-color:#fff!important}.has-text-black{color:#0a0a0a!important}a.has-text-black:focus,a.has-text-black:hover{color:#000!important}.has-background-black{background-color:#0a0a0a!important}.has-text-light{color:#f5f5f5!important}a.has-text-light:focus,a.has-text-light:hover{color:#dbdbdb!important}.has-background-light{background-color:#f5f5f5!important}.has-text-dark{color:#363636!important}a.has-text-dark:focus,a.has-text-dark:hover{color:#1c1c1c!important}.has-background-dark{background-color:#363636!important}.has-text-primary{color:#00d1b2!important}a.has-text-primary:focus,a.has-text-primary:hover{color:#009e86!important}.has-background-primary{background-color:#00d1b2!important}.has-text-primary-light{color:#ebfffc!important}a.has-text-primary-light:focus,a.has-text-primary-light:hover{color:#b8fff4!important}.has-background-primary-light{background-color:#ebfffc!important}.has-text-primary-dark{color:#00947e!important}a.has-text-primary-dark:focus,a.has-text-primary-dark:hover{color:#00c7a9!important}.has-background-primary-dark{background-color:#00947e!important}.has-text-link{color:#3273dc!important}a.has-text-link:focus,a.has-text-link:hover{color:#205bbc!important}.has-background-link{background-color:#3273dc!important}.has-text-link-light{color:#eef3fc!important}a.has-text-link-light:focus,a.has-text-link-light:hover{color:#c2d5f5!important}.has-b
ackground-link-light{background-color:#eef3fc!important}.has-text-link-dark{color:#2160c4!important}a.has-text-link-dark:focus,a.has-text-link-dark:hover{color:#3b79de!important}.has-background-link-dark{background-color:#2160c4!important}.has-text-info{color:#3298dc!important}a.has-text-info:focus,a.has-text-info:hover{color:#207dbc!important}.has-background-info{background-color:#3298dc!important}.has-text-info-light{color:#eef6fc!important}a.has-text-info-light:focus,a.has-text-info-light:hover{color:#c2e0f5!important}.has-background-info-light{background-color:#eef6fc!important}.has-text-info-dark{color:#1d72aa!important}a.has-text-info-dark:focus,a.has-text-info-dark:hover{color:#248fd6!important}.has-background-info-dark{background-color:#1d72aa!important}.has-text-success{color:#48c774!important}a.has-text-success:focus,a.has-text-success:hover{color:#34a85c!important}.has-background-success{background-color:#48c774!important}.has-text-success-light{color:#effaf3!important}a.has-text-success-light:focus,a.has-text-success-light:hover{color:#c8eed6!important}.has-background-success-light{background-color:#effaf3!important}.has-text-success-dark{color:#257942!important}a.has-text-success-dark:focus,a.has-text-success-dark:hover{color:#31a058!important}.has-background-success-dark{background-color:#257942!important}.has-text-warning{color:#ffdd57!important}a.has-text-warning:focus,a.has-text-warning:hover{color:#ffd324!important}.has-background-warning{background-color:#ffdd57!important}.has-text-warning-light{color:#fffbeb!important}a.has-text-warning-light:focus,a.has-text-warning-light:hover{color:#fff1b8!important}.has-background-warning-light{background-color:#fffbeb!important}.has-text-warning-dark{color:#947600!important}a.has-text-warning-dark:focus,a.has-text-warning-dark:hover{color:#c79f00!important}.has-background-warning-dark{background-color:#947600!important}.has-text-danger{color:#f14668!important}a.has-text-danger:focus,a.has-text-danger:hover{c
olor:#ee1742!important}.has-background-danger{background-color:#f14668!important}.has-text-danger-light{color:#feecf0!important}a.has-text-danger-light:focus,a.has-text-danger-light:hover{color:#fabdc9!important}.has-background-danger-light{background-color:#feecf0!important}.has-text-danger-dark{color:#cc0f35!important}a.has-text-danger-dark:focus,a.has-text-danger-dark:hover{color:#ee2049!important}.has-background-danger-dark{background-color:#cc0f35!important}.has-text-black-bis{color:#121212!important}.has-background-black-bis{background-color:#121212!important}.has-text-black-ter{color:#242424!important}.has-background-black-ter{background-color:#242424!important}.has-text-grey-darker{color:#363636!important}.has-background-grey-darker{background-color:#363636!important}.has-text-grey-dark{color:#4a4a4a!important}.has-background-grey-dark{background-color:#4a4a4a!important}.has-text-grey{color:#7a7a7a!important}.has-background-grey{background-color:#7a7a7a!important}.has-text-grey-light{color:#b5b5b5!important}.has-background-grey-light{background-color:#b5b5b5!important}.has-text-grey-lighter{color:#dbdbdb!important}.has-background-grey-lighter{background-color:#dbdbdb!important}.has-text-white-ter{color:#f5f5f5!important}.has-background-white-ter{background-color:#f5f5f5!important}.has-text-white-bis{color:#fafafa!important}.has-background-white-bis{background-color:#fafafa!important}.is-flex-direction-row{flex-direction:row!important}.is-flex-direction-row-reverse{flex-direction:row-reverse!important}.is-flex-direction-column{flex-direction:column!important}.is-flex-direction-column-reverse{flex-direction:column-reverse!important}.is-flex-wrap-nowrap{flex-wrap:nowrap!important}.is-flex-wrap-wrap{flex-wrap:wrap!important}.is-flex-wrap-wrap-reverse{flex-wrap:wrap-reverse!important}.is-justify-content-flex-start{justify-content:flex-start!important}.is-justify-content-flex-end{justify-content:flex-end!important}.is-justify-content-center{justify-content:center!
important}.is-justify-content-space-between{justify-content:space-between!important}.is-justify-content-space-around{justify-content:space-around!important}.is-justify-content-space-evenly{justify-content:space-evenly!important}.is-justify-content-start{justify-content:start!important}.is-justify-content-end{justify-content:end!important}.is-justify-content-left{justify-content:left!important}.is-justify-content-right{justify-content:right!important}.is-align-content-flex-start{align-content:flex-start!important}.is-align-content-flex-end{align-content:flex-end!important}.is-align-content-center{align-content:center!important}.is-align-content-space-between{align-content:space-between!important}.is-align-content-space-around{align-content:space-around!important}.is-align-content-space-evenly{align-content:space-evenly!important}.is-align-content-stretch{align-content:stretch!important}.is-align-content-start{align-content:start!important}.is-align-content-end{align-content:end!important}.is-align-content-baseline{align-content:baseline!important}.is-align-items-stretch{align-items:stretch!important}.is-align-items-flex-start{align-items:flex-start!important}.is-align-items-flex-end{align-items:flex-end!important}.is-align-items-center{align-items:center!important}.is-align-items-baseline{align-items:baseline!important}.is-align-items-start{align-items:start!important}.is-align-items-end{align-items:end!important}.is-align-items-self-start{align-items:self-start!important}.is-align-items-self-end{align-items:self-end!important}.is-align-self-auto{align-self:auto!important}.is-align-self-flex-start{align-self:flex-start!important}.is-align-self-flex-end{align-self:flex-end!important}.is-align-self-center{align-self:center!important}.is-align-self-baseline{align-self:baseline!important}.is-align-self-stretch{align-self:stretch!important}.is-flex-grow-0{flex-grow:0!important}.is-flex-grow-1{flex-grow:1!important}.is-flex-grow-2{flex-grow:2!important}.is-flex-grow-3{flex
-grow:3!important}.is-flex-grow-4{flex-grow:4!important}.is-flex-grow-5{flex-grow:5!important}.is-flex-shrink-0{flex-shrink:0!important}.is-flex-shrink-1{flex-shrink:1!important}.is-flex-shrink-2{flex-shrink:2!important}.is-flex-shrink-3{flex-shrink:3!important}.is-flex-shrink-4{flex-shrink:4!important}.is-flex-shrink-5{flex-shrink:5!important}.is-clearfix::after{clear:both;content:" ";display:table}.is-pulled-left{float:left!important}.is-pulled-right{float:right!important}.is-radiusless{border-radius:0!important}.is-shadowless{box-shadow:none!important}.is-clickable{cursor:pointer!important}.is-clipped{overflow:hidden!important}.is-relative{position:relative!important}.is-marginless{margin:0!important}.is-paddingless{padding:0!important}.m-0{margin:0!important}.mt-0{margin-top:0!important}.mr-0{margin-right:0!important}.mb-0{margin-bottom:0!important}.ml-0{margin-left:0!important}.mx-0{margin-left:0!important;margin-right:0!important}.my-0{margin-top:0!important;margin-bottom:0!important}.m-1{margin:.25rem!important}.mt-1{margin-top:.25rem!important}.mr-1{margin-right:.25rem!important}.mb-1{margin-bottom:.25rem!important}.ml-1{margin-left:.25rem!important}.mx-1{margin-left:.25rem!important;margin-right:.25rem!important}.my-1{margin-top:.25rem!important;margin-bottom:.25rem!important}.m-2{margin:.5rem!important}.mt-2{margin-top:.5rem!important}.mr-2{margin-right:.5rem!important}.mb-2{margin-bottom:.5rem!important}.ml-2{margin-left:.5rem!important}.mx-2{margin-left:.5rem!important;margin-right:.5rem!important}.my-2{margin-top:.5rem!important;margin-bottom:.5rem!important}.m-3{margin:.75rem!important}.mt-3{margin-top:.75rem!important}.mr-3{margin-right:.75rem!important}.mb-3{margin-bottom:.75rem!important}.ml-3{margin-left:.75rem!important}.mx-3{margin-left:.75rem!important;margin-right:.75rem!important}.my-3{margin-top:.75rem!important;margin-bottom:.75rem!important}.m-4{margin:1rem!important}.mt-4{margin-top:1rem!important}.mr-4{margin-right:1rem!important}.mb-4{ma
rgin-bottom:1rem!important}.ml-4{margin-left:1rem!important}.mx-4{margin-left:1rem!important;margin-right:1rem!important}.my-4{margin-top:1rem!important;margin-bottom:1rem!important}.m-5{margin:1.5rem!important}.mt-5{margin-top:1.5rem!important}.mr-5{margin-right:1.5rem!important}.mb-5{margin-bottom:1.5rem!important}.ml-5{margin-left:1.5rem!important}.mx-5{margin-left:1.5rem!important;margin-right:1.5rem!important}.my-5{margin-top:1.5rem!important;margin-bottom:1.5rem!important}.m-6{margin:3rem!important}.mt-6{margin-top:3rem!important}.mr-6{margin-right:3rem!important}.mb-6{margin-bottom:3rem!important}.ml-6{margin-left:3rem!important}.mx-6{margin-left:3rem!important;margin-right:3rem!important}.my-6{margin-top:3rem!important;margin-bottom:3rem!important}.p-0{padding:0!important}.pt-0{padding-top:0!important}.pr-0{padding-right:0!important}.pb-0{padding-bottom:0!important}.pl-0{padding-left:0!important}.px-0{padding-left:0!important;padding-right:0!important}.py-0{padding-top:0!important;padding-bottom:0!important}.p-1{padding:.25rem!important}.pt-1{padding-top:.25rem!important}.pr-1{padding-right:.25rem!important}.pb-1{padding-bottom:.25rem!important}.pl-1{padding-left:.25rem!important}.px-1{padding-left:.25rem!important;padding-right:.25rem!important}.py-1{padding-top:.25rem!important;padding-bottom:.25rem!important}.p-2{padding:.5rem!important}.pt-2{padding-top:.5rem!important}.pr-2{padding-right:.5rem!important}.pb-2{padding-bottom:.5rem!important}.pl-2{padding-left:.5rem!important}.px-2{padding-left:.5rem!important;padding-right:.5rem!important}.py-2{padding-top:.5rem!important;padding-bottom:.5rem!important}.p-3{padding:.75rem!important}.pt-3{padding-top:.75rem!important}.pr-3{padding-right:.75rem!important}.pb-3{padding-bottom:.75rem!important}.pl-3{padding-left:.75rem!important}.px-3{padding-left:.75rem!important;padding-right:.75rem!important}.py-3{padding-top:.75rem!important;padding-bottom:.75rem!important}.p-4{padding:1rem!important}.pt-4{padding-top:1r
em!important}.pr-4{padding-right:1rem!important}.pb-4{padding-bottom:1rem!important}.pl-4{padding-left:1rem!important}.px-4{padding-left:1rem!important;padding-right:1rem!important}.py-4{padding-top:1rem!important;padding-bottom:1rem!important}.p-5{padding:1.5rem!important}.pt-5{padding-top:1.5rem!important}.pr-5{padding-right:1.5rem!important}.pb-5{padding-bottom:1.5rem!important}.pl-5{padding-left:1.5rem!important}.px-5{padding-left:1.5rem!important;padding-right:1.5rem!important}.py-5{padding-top:1.5rem!important;padding-bottom:1.5rem!important}.p-6{padding:3rem!important}.pt-6{padding-top:3rem!important}.pr-6{padding-right:3rem!important}.pb-6{padding-bottom:3rem!important}.pl-6{padding-left:3rem!important}.px-6{padding-left:3rem!important;padding-right:3rem!important}.py-6{padding-top:3rem!important;padding-bottom:3rem!important}.is-size-1{font-size:3rem!important}.is-size-2{font-size:2.5rem!important}.is-size-3{font-size:2rem!important}.is-size-4{font-size:1.5rem!important}.is-size-5{font-size:1.25rem!important}.is-size-6{font-size:1rem!important}.is-size-7{font-size:.75rem!important}@media screen and (max-width:768px){.is-size-1-mobile{font-size:3rem!important}.is-size-2-mobile{font-size:2.5rem!important}.is-size-3-mobile{font-size:2rem!important}.is-size-4-mobile{font-size:1.5rem!important}.is-size-5-mobile{font-size:1.25rem!important}.is-size-6-mobile{font-size:1rem!important}.is-size-7-mobile{font-size:.75rem!important}}@media screen and (min-width:769px),print{.is-size-1-tablet{font-size:3rem!important}.is-size-2-tablet{font-size:2.5rem!important}.is-size-3-tablet{font-size:2rem!important}.is-size-4-tablet{font-size:1.5rem!important}.is-size-5-tablet{font-size:1.25rem!important}.is-size-6-tablet{font-size:1rem!important}.is-size-7-tablet{font-size:.75rem!important}}@media screen and 
(max-width:1023px){.is-size-1-touch{font-size:3rem!important}.is-size-2-touch{font-size:2.5rem!important}.is-size-3-touch{font-size:2rem!important}.is-size-4-touch{font-size:1.5rem!important}.is-size-5-touch{font-size:1.25rem!important}.is-size-6-touch{font-size:1rem!important}.is-size-7-touch{font-size:.75rem!important}}@media screen and (min-width:1024px){.is-size-1-desktop{font-size:3rem!important}.is-size-2-desktop{font-size:2.5rem!important}.is-size-3-desktop{font-size:2rem!important}.is-size-4-desktop{font-size:1.5rem!important}.is-size-5-desktop{font-size:1.25rem!important}.is-size-6-desktop{font-size:1rem!important}.is-size-7-desktop{font-size:.75rem!important}}@media screen and (min-width:1216px){.is-size-1-widescreen{font-size:3rem!important}.is-size-2-widescreen{font-size:2.5rem!important}.is-size-3-widescreen{font-size:2rem!important}.is-size-4-widescreen{font-size:1.5rem!important}.is-size-5-widescreen{font-size:1.25rem!important}.is-size-6-widescreen{font-size:1rem!important}.is-size-7-widescreen{font-size:.75rem!important}}@media screen and (min-width:1408px){.is-size-1-fullhd{font-size:3rem!important}.is-size-2-fullhd{font-size:2.5rem!important}.is-size-3-fullhd{font-size:2rem!important}.is-size-4-fullhd{font-size:1.5rem!important}.is-size-5-fullhd{font-size:1.25rem!important}.is-size-6-fullhd{font-size:1rem!important}.is-size-7-fullhd{font-size:.75rem!important}}.has-text-centered{text-align:center!important}.has-text-justified{text-align:justify!important}.has-text-left{text-align:left!important}.has-text-right{text-align:right!important}@media screen and (max-width:768px){.has-text-centered-mobile{text-align:center!important}}@media screen and (min-width:769px),print{.has-text-centered-tablet{text-align:center!important}}@media screen and (min-width:769px) and (max-width:1023px){.has-text-centered-tablet-only{text-align:center!important}}@media screen and (max-width:1023px){.has-text-centered-touch{text-align:center!important}}@media screen and 
(min-width:1024px){.has-text-centered-desktop{text-align:center!important}}@media screen and (min-width:1024px) and (max-width:1215px){.has-text-centered-desktop-only{text-align:center!important}}@media screen and (min-width:1216px){.has-text-centered-widescreen{text-align:center!important}}@media screen and (min-width:1216px) and (max-width:1407px){.has-text-centered-widescreen-only{text-align:center!important}}@media screen and (min-width:1408px){.has-text-centered-fullhd{text-align:center!important}}@media screen and (max-width:768px){.has-text-justified-mobile{text-align:justify!important}}@media screen and (min-width:769px),print{.has-text-justified-tablet{text-align:justify!important}}@media screen and (min-width:769px) and (max-width:1023px){.has-text-justified-tablet-only{text-align:justify!important}}@media screen and (max-width:1023px){.has-text-justified-touch{text-align:justify!important}}@media screen and (min-width:1024px){.has-text-justified-desktop{text-align:justify!important}}@media screen and (min-width:1024px) and (max-width:1215px){.has-text-justified-desktop-only{text-align:justify!important}}@media screen and (min-width:1216px){.has-text-justified-widescreen{text-align:justify!important}}@media screen and (min-width:1216px) and (max-width:1407px){.has-text-justified-widescreen-only{text-align:justify!important}}@media screen and (min-width:1408px){.has-text-justified-fullhd{text-align:justify!important}}@media screen and (max-width:768px){.has-text-left-mobile{text-align:left!important}}@media screen and (min-width:769px),print{.has-text-left-tablet{text-align:left!important}}@media screen and (min-width:769px) and (max-width:1023px){.has-text-left-tablet-only{text-align:left!important}}@media screen and (max-width:1023px){.has-text-left-touch{text-align:left!important}}@media screen and (min-width:1024px){.has-text-left-desktop{text-align:left!important}}@media screen and (min-width:1024px) and 
(max-width:1215px){.has-text-left-desktop-only{text-align:left!important}}@media screen and (min-width:1216px){.has-text-left-widescreen{text-align:left!important}}@media screen and (min-width:1216px) and (max-width:1407px){.has-text-left-widescreen-only{text-align:left!important}}@media screen and (min-width:1408px){.has-text-left-fullhd{text-align:left!important}}@media screen and (max-width:768px){.has-text-right-mobile{text-align:right!important}}@media screen and (min-width:769px),print{.has-text-right-tablet{text-align:right!important}}@media screen and (min-width:769px) and (max-width:1023px){.has-text-right-tablet-only{text-align:right!important}}@media screen and (max-width:1023px){.has-text-right-touch{text-align:right!important}}@media screen and (min-width:1024px){.has-text-right-desktop{text-align:right!important}}@media screen and (min-width:1024px) and (max-width:1215px){.has-text-right-desktop-only{text-align:right!important}}@media screen and (min-width:1216px){.has-text-right-widescreen{text-align:right!important}}@media screen and (min-width:1216px) and (max-width:1407px){.has-text-right-widescreen-only{text-align:right!important}}@media screen and (min-width:1408px){.has-text-right-fullhd{text-align:right!important}}.is-capitalized{text-transform:capitalize!important}.is-lowercase{text-transform:lowercase!important}.is-uppercase{text-transform:uppercase!important}.is-italic{font-style:italic!important}.has-text-weight-light{font-weight:300!important}.has-text-weight-normal{font-weight:400!important}.has-text-weight-medium{font-weight:500!important}.has-text-weight-semibold{font-weight:600!important}.has-text-weight-bold{font-weight:700!important}.is-family-primary{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-secondary{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira 
Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-sans-serif{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-monospace{font-family:monospace!important}.is-family-code{font-family:monospace!important}.is-block{display:block!important}@media screen and (max-width:768px){.is-block-mobile{display:block!important}}@media screen and (min-width:769px),print{.is-block-tablet{display:block!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-block-tablet-only{display:block!important}}@media screen and (max-width:1023px){.is-block-touch{display:block!important}}@media screen and (min-width:1024px){.is-block-desktop{display:block!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-block-desktop-only{display:block!important}}@media screen and (min-width:1216px){.is-block-widescreen{display:block!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-block-widescreen-only{display:block!important}}@media screen and (min-width:1408px){.is-block-fullhd{display:block!important}}.is-flex{display:flex!important}@media screen and (max-width:768px){.is-flex-mobile{display:flex!important}}@media screen and (min-width:769px),print{.is-flex-tablet{display:flex!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-flex-tablet-only{display:flex!important}}@media screen and (max-width:1023px){.is-flex-touch{display:flex!important}}@media screen and (min-width:1024px){.is-flex-desktop{display:flex!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-flex-desktop-only{display:flex!important}}@media screen and (min-width:1216px){.is-flex-widescreen{display:flex!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-flex-widescreen-only{display:flex!important}}@media screen and 
(min-width:1408px){.is-flex-fullhd{display:flex!important}}.is-inline{display:inline!important}@media screen and (max-width:768px){.is-inline-mobile{display:inline!important}}@media screen and (min-width:769px),print{.is-inline-tablet{display:inline!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-inline-tablet-only{display:inline!important}}@media screen and (max-width:1023px){.is-inline-touch{display:inline!important}}@media screen and (min-width:1024px){.is-inline-desktop{display:inline!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-inline-desktop-only{display:inline!important}}@media screen and (min-width:1216px){.is-inline-widescreen{display:inline!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-inline-widescreen-only{display:inline!important}}@media screen and (min-width:1408px){.is-inline-fullhd{display:inline!important}}.is-inline-block{display:inline-block!important}@media screen and (max-width:768px){.is-inline-block-mobile{display:inline-block!important}}@media screen and (min-width:769px),print{.is-inline-block-tablet{display:inline-block!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-inline-block-tablet-only{display:inline-block!important}}@media screen and (max-width:1023px){.is-inline-block-touch{display:inline-block!important}}@media screen and (min-width:1024px){.is-inline-block-desktop{display:inline-block!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-inline-block-desktop-only{display:inline-block!important}}@media screen and (min-width:1216px){.is-inline-block-widescreen{display:inline-block!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-inline-block-widescreen-only{display:inline-block!important}}@media screen and (min-width:1408px){.is-inline-block-fullhd{display:inline-block!important}}.is-inline-flex{display:inline-flex!important}@media screen and 
(max-width:768px){.is-inline-flex-mobile{display:inline-flex!important}}@media screen and (min-width:769px),print{.is-inline-flex-tablet{display:inline-flex!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-inline-flex-tablet-only{display:inline-flex!important}}@media screen and (max-width:1023px){.is-inline-flex-touch{display:inline-flex!important}}@media screen and (min-width:1024px){.is-inline-flex-desktop{display:inline-flex!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-inline-flex-desktop-only{display:inline-flex!important}}@media screen and (min-width:1216px){.is-inline-flex-widescreen{display:inline-flex!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-inline-flex-widescreen-only{display:inline-flex!important}}@media screen and (min-width:1408px){.is-inline-flex-fullhd{display:inline-flex!important}}.is-hidden{display:none!important}.is-sr-only{border:none!important;clip:rect(0,0,0,0)!important;height:.01em!important;overflow:hidden!important;padding:0!important;position:absolute!important;white-space:nowrap!important;width:.01em!important}@media screen and (max-width:768px){.is-hidden-mobile{display:none!important}}@media screen and (min-width:769px),print{.is-hidden-tablet{display:none!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-hidden-tablet-only{display:none!important}}@media screen and (max-width:1023px){.is-hidden-touch{display:none!important}}@media screen and (min-width:1024px){.is-hidden-desktop{display:none!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-hidden-desktop-only{display:none!important}}@media screen and (min-width:1216px){.is-hidden-widescreen{display:none!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-hidden-widescreen-only{display:none!important}}@media screen and (min-width:1408px){.is-hidden-fullhd{display:none!important}}.is-invisible{visibility:hidden!important}@media 
screen and (max-width:768px){.is-invisible-mobile{visibility:hidden!important}}@media screen and (min-width:769px),print{.is-invisible-tablet{visibility:hidden!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-invisible-tablet-only{visibility:hidden!important}}@media screen and (max-width:1023px){.is-invisible-touch{visibility:hidden!important}}@media screen and (min-width:1024px){.is-invisible-desktop{visibility:hidden!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-invisible-desktop-only{visibility:hidden!important}}@media screen and (min-width:1216px){.is-invisible-widescreen{visibility:hidden!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-invisible-widescreen-only{visibility:hidden!important}}@media screen and (min-width:1408px){.is-invisible-fullhd{visibility:hidden!important}}.hero{align-items:stretch;display:flex;flex-direction:column;justify-content:space-between}.hero .navbar{background:0 0}.hero .tabs ul{border-bottom:none}.hero.is-white{background-color:#fff;color:#0a0a0a}.hero.is-white a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-white strong{color:inherit}.hero.is-white .title{color:#0a0a0a}.hero.is-white .subtitle{color:rgba(10,10,10,.9)}.hero.is-white .subtitle a:not(.button),.hero.is-white .subtitle strong{color:#0a0a0a}@media screen and (max-width:1023px){.hero.is-white .navbar-menu{background-color:#fff}}.hero.is-white .navbar-item,.hero.is-white .navbar-link{color:rgba(10,10,10,.7)}.hero.is-white .navbar-link.is-active,.hero.is-white .navbar-link:hover,.hero.is-white a.navbar-item.is-active,.hero.is-white a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.hero.is-white .tabs a{color:#0a0a0a;opacity:.9}.hero.is-white .tabs a:hover{opacity:1}.hero.is-white .tabs li.is-active a{opacity:1}.hero.is-white .tabs.is-boxed a,.hero.is-white .tabs.is-toggle a{color:#0a0a0a}.hero.is-white .tabs.is-boxed a:hover,.hero.is-white 
.tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-white .tabs.is-boxed li.is-active a,.hero.is-white .tabs.is-boxed li.is-active a:hover,.hero.is-white .tabs.is-toggle li.is-active a,.hero.is-white .tabs.is-toggle li.is-active a:hover{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.hero.is-white.is-bold{background-image:linear-gradient(141deg,#e6e6e6 0,#fff 71%,#fff 100%)}@media screen and (max-width:768px){.hero.is-white.is-bold .navbar-menu{background-image:linear-gradient(141deg,#e6e6e6 0,#fff 71%,#fff 100%)}}.hero.is-black{background-color:#0a0a0a;color:#fff}.hero.is-black a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-black strong{color:inherit}.hero.is-black .title{color:#fff}.hero.is-black .subtitle{color:rgba(255,255,255,.9)}.hero.is-black .subtitle a:not(.button),.hero.is-black .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-black .navbar-menu{background-color:#0a0a0a}}.hero.is-black .navbar-item,.hero.is-black .navbar-link{color:rgba(255,255,255,.7)}.hero.is-black .navbar-link.is-active,.hero.is-black .navbar-link:hover,.hero.is-black a.navbar-item.is-active,.hero.is-black a.navbar-item:hover{background-color:#000;color:#fff}.hero.is-black .tabs a{color:#fff;opacity:.9}.hero.is-black .tabs a:hover{opacity:1}.hero.is-black .tabs li.is-active a{opacity:1}.hero.is-black .tabs.is-boxed a,.hero.is-black .tabs.is-toggle a{color:#fff}.hero.is-black .tabs.is-boxed a:hover,.hero.is-black .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-black .tabs.is-boxed li.is-active a,.hero.is-black .tabs.is-boxed li.is-active a:hover,.hero.is-black .tabs.is-toggle li.is-active a,.hero.is-black .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#0a0a0a}.hero.is-black.is-bold{background-image:linear-gradient(141deg,#000 0,#0a0a0a 71%,#181616 100%)}@media screen and (max-width:768px){.hero.is-black.is-bold 
.navbar-menu{background-image:linear-gradient(141deg,#000 0,#0a0a0a 71%,#181616 100%)}}.hero.is-light{background-color:#f5f5f5;color:rgba(0,0,0,.7)}.hero.is-light a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-light strong{color:inherit}.hero.is-light .title{color:rgba(0,0,0,.7)}.hero.is-light .subtitle{color:rgba(0,0,0,.9)}.hero.is-light .subtitle a:not(.button),.hero.is-light .subtitle strong{color:rgba(0,0,0,.7)}@media screen and (max-width:1023px){.hero.is-light .navbar-menu{background-color:#f5f5f5}}.hero.is-light .navbar-item,.hero.is-light .navbar-link{color:rgba(0,0,0,.7)}.hero.is-light .navbar-link.is-active,.hero.is-light .navbar-link:hover,.hero.is-light a.navbar-item.is-active,.hero.is-light a.navbar-item:hover{background-color:#e8e8e8;color:rgba(0,0,0,.7)}.hero.is-light .tabs a{color:rgba(0,0,0,.7);opacity:.9}.hero.is-light .tabs a:hover{opacity:1}.hero.is-light .tabs li.is-active a{opacity:1}.hero.is-light .tabs.is-boxed a,.hero.is-light .tabs.is-toggle a{color:rgba(0,0,0,.7)}.hero.is-light .tabs.is-boxed a:hover,.hero.is-light .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-light .tabs.is-boxed li.is-active a,.hero.is-light .tabs.is-boxed li.is-active a:hover,.hero.is-light .tabs.is-toggle li.is-active a,.hero.is-light .tabs.is-toggle li.is-active a:hover{background-color:rgba(0,0,0,.7);border-color:rgba(0,0,0,.7);color:#f5f5f5}.hero.is-light.is-bold{background-image:linear-gradient(141deg,#dfd8d9 0,#f5f5f5 71%,#fff 100%)}@media screen and (max-width:768px){.hero.is-light.is-bold .navbar-menu{background-image:linear-gradient(141deg,#dfd8d9 0,#f5f5f5 71%,#fff 100%)}}.hero.is-dark{background-color:#363636;color:#fff}.hero.is-dark a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-dark strong{color:inherit}.hero.is-dark .title{color:#fff}.hero.is-dark .subtitle{color:rgba(255,255,255,.9)}.hero.is-dark .subtitle a:not(.button),.hero.is-dark .subtitle 
strong{color:#fff}@media screen and (max-width:1023px){.hero.is-dark .navbar-menu{background-color:#363636}}.hero.is-dark .navbar-item,.hero.is-dark .navbar-link{color:rgba(255,255,255,.7)}.hero.is-dark .navbar-link.is-active,.hero.is-dark .navbar-link:hover,.hero.is-dark a.navbar-item.is-active,.hero.is-dark a.navbar-item:hover{background-color:#292929;color:#fff}.hero.is-dark .tabs a{color:#fff;opacity:.9}.hero.is-dark .tabs a:hover{opacity:1}.hero.is-dark .tabs li.is-active a{opacity:1}.hero.is-dark .tabs.is-boxed a,.hero.is-dark .tabs.is-toggle a{color:#fff}.hero.is-dark .tabs.is-boxed a:hover,.hero.is-dark .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-dark .tabs.is-boxed li.is-active a,.hero.is-dark .tabs.is-boxed li.is-active a:hover,.hero.is-dark .tabs.is-toggle li.is-active a,.hero.is-dark .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#363636}.hero.is-dark.is-bold{background-image:linear-gradient(141deg,#1f191a 0,#363636 71%,#46403f 100%)}@media screen and (max-width:768px){.hero.is-dark.is-bold .navbar-menu{background-image:linear-gradient(141deg,#1f191a 0,#363636 71%,#46403f 100%)}}.hero.is-primary{background-color:#00d1b2;color:#fff}.hero.is-primary a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-primary strong{color:inherit}.hero.is-primary .title{color:#fff}.hero.is-primary .subtitle{color:rgba(255,255,255,.9)}.hero.is-primary .subtitle a:not(.button),.hero.is-primary .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-primary .navbar-menu{background-color:#00d1b2}}.hero.is-primary .navbar-item,.hero.is-primary .navbar-link{color:rgba(255,255,255,.7)}.hero.is-primary .navbar-link.is-active,.hero.is-primary .navbar-link:hover,.hero.is-primary a.navbar-item.is-active,.hero.is-primary a.navbar-item:hover{background-color:#00b89c;color:#fff}.hero.is-primary .tabs a{color:#fff;opacity:.9}.hero.is-primary .tabs 
a:hover{opacity:1}.hero.is-primary .tabs li.is-active a{opacity:1}.hero.is-primary .tabs.is-boxed a,.hero.is-primary .tabs.is-toggle a{color:#fff}.hero.is-primary .tabs.is-boxed a:hover,.hero.is-primary .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-primary .tabs.is-boxed li.is-active a,.hero.is-primary .tabs.is-boxed li.is-active a:hover,.hero.is-primary .tabs.is-toggle li.is-active a,.hero.is-primary .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#00d1b2}.hero.is-primary.is-bold{background-image:linear-gradient(141deg,#009e6c 0,#00d1b2 71%,#00e7eb 100%)}@media screen and (max-width:768px){.hero.is-primary.is-bold .navbar-menu{background-image:linear-gradient(141deg,#009e6c 0,#00d1b2 71%,#00e7eb 100%)}}.hero.is-link{background-color:#3273dc;color:#fff}.hero.is-link a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-link strong{color:inherit}.hero.is-link .title{color:#fff}.hero.is-link .subtitle{color:rgba(255,255,255,.9)}.hero.is-link .subtitle a:not(.button),.hero.is-link .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-link .navbar-menu{background-color:#3273dc}}.hero.is-link .navbar-item,.hero.is-link .navbar-link{color:rgba(255,255,255,.7)}.hero.is-link .navbar-link.is-active,.hero.is-link .navbar-link:hover,.hero.is-link a.navbar-item.is-active,.hero.is-link a.navbar-item:hover{background-color:#2366d1;color:#fff}.hero.is-link .tabs a{color:#fff;opacity:.9}.hero.is-link .tabs a:hover{opacity:1}.hero.is-link .tabs li.is-active a{opacity:1}.hero.is-link .tabs.is-boxed a,.hero.is-link .tabs.is-toggle a{color:#fff}.hero.is-link .tabs.is-boxed a:hover,.hero.is-link .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-link .tabs.is-boxed li.is-active a,.hero.is-link .tabs.is-boxed li.is-active a:hover,.hero.is-link .tabs.is-toggle li.is-active a,.hero.is-link .tabs.is-toggle li.is-active 
a:hover{background-color:#fff;border-color:#fff;color:#3273dc}.hero.is-link.is-bold{background-image:linear-gradient(141deg,#1577c6 0,#3273dc 71%,#4366e5 100%)}@media screen and (max-width:768px){.hero.is-link.is-bold .navbar-menu{background-image:linear-gradient(141deg,#1577c6 0,#3273dc 71%,#4366e5 100%)}}.hero.is-info{background-color:#3298dc;color:#fff}.hero.is-info a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-info strong{color:inherit}.hero.is-info .title{color:#fff}.hero.is-info .subtitle{color:rgba(255,255,255,.9)}.hero.is-info .subtitle a:not(.button),.hero.is-info .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-info .navbar-menu{background-color:#3298dc}}.hero.is-info .navbar-item,.hero.is-info .navbar-link{color:rgba(255,255,255,.7)}.hero.is-info .navbar-link.is-active,.hero.is-info .navbar-link:hover,.hero.is-info a.navbar-item.is-active,.hero.is-info a.navbar-item:hover{background-color:#238cd1;color:#fff}.hero.is-info .tabs a{color:#fff;opacity:.9}.hero.is-info .tabs a:hover{opacity:1}.hero.is-info .tabs li.is-active a{opacity:1}.hero.is-info .tabs.is-boxed a,.hero.is-info .tabs.is-toggle a{color:#fff}.hero.is-info .tabs.is-boxed a:hover,.hero.is-info .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-info .tabs.is-boxed li.is-active a,.hero.is-info .tabs.is-boxed li.is-active a:hover,.hero.is-info .tabs.is-toggle li.is-active a,.hero.is-info .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#3298dc}.hero.is-info.is-bold{background-image:linear-gradient(141deg,#159dc6 0,#3298dc 71%,#4389e5 100%)}@media screen and (max-width:768px){.hero.is-info.is-bold .navbar-menu{background-image:linear-gradient(141deg,#159dc6 0,#3298dc 71%,#4389e5 100%)}}.hero.is-success{background-color:#48c774;color:#fff}.hero.is-success a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-success 
strong{color:inherit}.hero.is-success .title{color:#fff}.hero.is-success .subtitle{color:rgba(255,255,255,.9)}.hero.is-success .subtitle a:not(.button),.hero.is-success .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-success .navbar-menu{background-color:#48c774}}.hero.is-success .navbar-item,.hero.is-success .navbar-link{color:rgba(255,255,255,.7)}.hero.is-success .navbar-link.is-active,.hero.is-success .navbar-link:hover,.hero.is-success a.navbar-item.is-active,.hero.is-success a.navbar-item:hover{background-color:#3abb67;color:#fff}.hero.is-success .tabs a{color:#fff;opacity:.9}.hero.is-success .tabs a:hover{opacity:1}.hero.is-success .tabs li.is-active a{opacity:1}.hero.is-success .tabs.is-boxed a,.hero.is-success .tabs.is-toggle a{color:#fff}.hero.is-success .tabs.is-boxed a:hover,.hero.is-success .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-success .tabs.is-boxed li.is-active a,.hero.is-success .tabs.is-boxed li.is-active a:hover,.hero.is-success .tabs.is-toggle li.is-active a,.hero.is-success .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#48c774}.hero.is-success.is-bold{background-image:linear-gradient(141deg,#29b342 0,#48c774 71%,#56d296 100%)}@media screen and (max-width:768px){.hero.is-success.is-bold .navbar-menu{background-image:linear-gradient(141deg,#29b342 0,#48c774 71%,#56d296 100%)}}.hero.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.hero.is-warning a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-warning strong{color:inherit}.hero.is-warning .title{color:rgba(0,0,0,.7)}.hero.is-warning .subtitle{color:rgba(0,0,0,.9)}.hero.is-warning .subtitle a:not(.button),.hero.is-warning .subtitle strong{color:rgba(0,0,0,.7)}@media screen and (max-width:1023px){.hero.is-warning .navbar-menu{background-color:#ffdd57}}.hero.is-warning .navbar-item,.hero.is-warning .navbar-link{color:rgba(0,0,0,.7)}.hero.is-warning 
.navbar-link.is-active,.hero.is-warning .navbar-link:hover,.hero.is-warning a.navbar-item.is-active,.hero.is-warning a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.hero.is-warning .tabs a{color:rgba(0,0,0,.7);opacity:.9}.hero.is-warning .tabs a:hover{opacity:1}.hero.is-warning .tabs li.is-active a{opacity:1}.hero.is-warning .tabs.is-boxed a,.hero.is-warning .tabs.is-toggle a{color:rgba(0,0,0,.7)}.hero.is-warning .tabs.is-boxed a:hover,.hero.is-warning .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-warning .tabs.is-boxed li.is-active a,.hero.is-warning .tabs.is-boxed li.is-active a:hover,.hero.is-warning .tabs.is-toggle li.is-active a,.hero.is-warning .tabs.is-toggle li.is-active a:hover{background-color:rgba(0,0,0,.7);border-color:rgba(0,0,0,.7);color:#ffdd57}.hero.is-warning.is-bold{background-image:linear-gradient(141deg,#ffaf24 0,#ffdd57 71%,#fffa70 100%)}@media screen and (max-width:768px){.hero.is-warning.is-bold .navbar-menu{background-image:linear-gradient(141deg,#ffaf24 0,#ffdd57 71%,#fffa70 100%)}}.hero.is-danger{background-color:#f14668;color:#fff}.hero.is-danger a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-danger strong{color:inherit}.hero.is-danger .title{color:#fff}.hero.is-danger .subtitle{color:rgba(255,255,255,.9)}.hero.is-danger .subtitle a:not(.button),.hero.is-danger .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-danger .navbar-menu{background-color:#f14668}}.hero.is-danger .navbar-item,.hero.is-danger .navbar-link{color:rgba(255,255,255,.7)}.hero.is-danger .navbar-link.is-active,.hero.is-danger .navbar-link:hover,.hero.is-danger a.navbar-item.is-active,.hero.is-danger a.navbar-item:hover{background-color:#ef2e55;color:#fff}.hero.is-danger .tabs a{color:#fff;opacity:.9}.hero.is-danger .tabs a:hover{opacity:1}.hero.is-danger .tabs li.is-active a{opacity:1}.hero.is-danger .tabs.is-boxed a,.hero.is-danger .tabs.is-toggle 
a{color:#fff}.hero.is-danger .tabs.is-boxed a:hover,.hero.is-danger .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-danger .tabs.is-boxed li.is-active a,.hero.is-danger .tabs.is-boxed li.is-active a:hover,.hero.is-danger .tabs.is-toggle li.is-active a,.hero.is-danger .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#f14668}.hero.is-danger.is-bold{background-image:linear-gradient(141deg,#fa0a62 0,#f14668 71%,#f7595f 100%)}@media screen and (max-width:768px){.hero.is-danger.is-bold .navbar-menu{background-image:linear-gradient(141deg,#fa0a62 0,#f14668 71%,#f7595f 100%)}}.hero.is-small .hero-body{padding:1.5rem}@media screen and (min-width:769px),print{.hero.is-medium .hero-body{padding:9rem 1.5rem}}@media screen and (min-width:769px),print{.hero.is-large .hero-body{padding:18rem 1.5rem}}.hero.is-fullheight .hero-body,.hero.is-fullheight-with-navbar .hero-body,.hero.is-halfheight .hero-body{align-items:center;display:flex}.hero.is-fullheight .hero-body>.container,.hero.is-fullheight-with-navbar .hero-body>.container,.hero.is-halfheight .hero-body>.container{flex-grow:1;flex-shrink:1}.hero.is-halfheight{min-height:50vh}.hero.is-fullheight{min-height:100vh}.hero-video{overflow:hidden}.hero-video video{left:50%;min-height:100%;min-width:100%;position:absolute;top:50%;transform:translate3d(-50%,-50%,0)}.hero-video.is-transparent{opacity:.3}@media screen and (max-width:768px){.hero-video{display:none}}.hero-buttons{margin-top:1.5rem}@media screen and (max-width:768px){.hero-buttons .button{display:flex}.hero-buttons .button:not(:last-child){margin-bottom:.75rem}}@media screen and (min-width:769px),print{.hero-buttons{display:flex;justify-content:center}.hero-buttons .button:not(:last-child){margin-right:1.5rem}}.hero-foot,.hero-head{flex-grow:0;flex-shrink:0}.hero-body{flex-grow:1;flex-shrink:0;padding:3rem 1.5rem}.section{padding:3rem 1.5rem}@media screen and (min-width:1024px){.section.is-medium{padding:9rem 
1.5rem}.section.is-large{padding:18rem 1.5rem}}.footer{background-color:#fafafa;padding:3rem 1.5rem 6rem} \ No newline at end of file diff --git a/docs/static/css/fontawesome.all.min.css b/docs/static/css/fontawesome.all.min.css new file mode 100644 index 0000000..656a507 --- /dev/null +++ b/docs/static/css/fontawesome.all.min.css @@ -0,0 +1,5 @@ +/*! + * Font Awesome Free 5.15.1 by @fontawesome - https://fontawesome.com + * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) + */ +.fa,.fab,.fad,.fal,.far,.fas{-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;display:inline-block;font-style:normal;font-variant:normal;text-rendering:auto;line-height:1}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-.0667em}.fa-xs{font-size:.75em}.fa-sm{font-size:.875em}.fa-1x{font-size:1em}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-6x{font-size:6em}.fa-7x{font-size:7em}.fa-8x{font-size:8em}.fa-9x{font-size:9em}.fa-10x{font-size:10em}.fa-fw{text-align:center;width:1.25em}.fa-ul{list-style-type:none;margin-left:2.5em;padding-left:0}.fa-ul>li{position:relative}.fa-li{left:-2em;position:absolute;text-align:center;width:2em;line-height:inherit}.fa-border{border:.08em solid #eee;border-radius:.1em;padding:.2em .25em .15em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa.fa-pull-left,.fab.fa-pull-left,.fal.fa-pull-left,.far.fa-pull-left,.fas.fa-pull-left{margin-right:.3em}.fa.fa-pull-right,.fab.fa-pull-right,.fal.fa-pull-right,.far.fa-pull-right,.fas.fa-pull-right{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(1turn);transform:rotate(1turn)}}@keyframes 
fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(1turn);transform:rotate(1turn)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-webkit-transform:scaleY(-1);transform:scaleY(-1)}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical,.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)"}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical{-webkit-transform:scale(-1);transform:scale(-1)}:root .fa-flip-both,:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{-webkit-filter:none;filter:none}.fa-stack{display:inline-block;height:2em;line-height:2em;position:relative;vertical-align:middle;width:2.5em}.fa-stack-1x,.fa-stack-2x{left:0;position:absolute;text-align:center;width:100%}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-500px:before{content:"\f26e"}.fa-accessible-icon:before{content:"\f368"}.fa-accusoft:before{content:"\f369"}.fa-acquisitions-incorporated:before{content:"\f6af"}.fa-ad:before{content:"\f641"}.fa-address-book:before{content:"\f2b9"}.fa-address-card:before{content:"\f2bb"}.fa-adjust:before{content:"\f042"}.fa-adn:before{content:"\f170"}.fa-adversal:before{content:"\f36a"}.fa-affiliatetheme:before{content:"\f36b"}.fa-air-freshener:before{content:"\f5d0"}.fa-airbnb:before{content:"\f834"}.fa-algolia:before{content:"\f36c"}.fa-align-center:before{content:"\f037"}.fa-align-justify:before{content:"\f039"}.fa-align-left:before{content:"\f036"}.fa-align-right:before{content:"\f038"}.fa-alipay:before{content:"\f642"}.fa-allergies:before{content:"\f461"}.fa-amazon:before{content:"\f270"}.fa-amazon-pay:before{content:"\f42c"}.fa-ambulance:before{content:"\f0f9"}.fa-american-sign-language-interpreting:before{content:"\f2a3"}.fa-amilia:before{content:"\f36d"}.fa-anchor:before{content:"\f13d"}.fa-android:before{content:"\f17b"}.fa-angellist:before{content:"\f209"}.fa-angle-double-down:before{content:"\f103"}.fa-angle-double-left:before{content:"\f100"}.fa-angle-double-right:before{content:"\f101"}.fa-angle-double-up:before{content:"\f102"}.fa-angle-down:before{content:"\f107"}.fa-angle-left:before{content:"\f104"}.fa-angle-right:before{content:"\f105"}.fa-angle-up:before{content:"\f106"}.fa-angry:before{content:"\f556"}.fa-angrycreative:before{content:"\f36e"}.fa-angular:before{content:"\f420"}.fa-ankh:before{content:"\f644"}.fa-app-store:before{content:"\f36f"}.fa-app-store-ios:before{content:"\f370"}.fa-apper:before{content:"\f371"}.fa-apple:before{content:"\f1
79"}.fa-apple-alt:before{content:"\f5d1"}.fa-apple-pay:before{content:"\f415"}.fa-archive:before{content:"\f187"}.fa-archway:before{content:"\f557"}.fa-arrow-alt-circle-down:before{content:"\f358"}.fa-arrow-alt-circle-left:before{content:"\f359"}.fa-arrow-alt-circle-right:before{content:"\f35a"}.fa-arrow-alt-circle-up:before{content:"\f35b"}.fa-arrow-circle-down:before{content:"\f0ab"}.fa-arrow-circle-left:before{content:"\f0a8"}.fa-arrow-circle-right:before{content:"\f0a9"}.fa-arrow-circle-up:before{content:"\f0aa"}.fa-arrow-down:before{content:"\f063"}.fa-arrow-left:before{content:"\f060"}.fa-arrow-right:before{content:"\f061"}.fa-arrow-up:before{content:"\f062"}.fa-arrows-alt:before{content:"\f0b2"}.fa-arrows-alt-h:before{content:"\f337"}.fa-arrows-alt-v:before{content:"\f338"}.fa-artstation:before{content:"\f77a"}.fa-assistive-listening-systems:before{content:"\f2a2"}.fa-asterisk:before{content:"\f069"}.fa-asymmetrik:before{content:"\f372"}.fa-at:before{content:"\f1fa"}.fa-atlas:before{content:"\f558"}.fa-atlassian:before{content:"\f77b"}.fa-atom:before{content:"\f5d2"}.fa-audible:before{content:"\f373"}.fa-audio-description:before{content:"\f29e"}.fa-autoprefixer:before{content:"\f41c"}.fa-avianex:before{content:"\f374"}.fa-aviato:before{content:"\f421"}.fa-award:before{content:"\f559"}.fa-aws:before{content:"\f375"}.fa-baby:before{content:"\f77c"}.fa-baby-carriage:before{content:"\f77d"}.fa-backspace:before{content:"\f55a"}.fa-backward:before{content:"\f04a"}.fa-bacon:before{content:"\f7e5"}.fa-bacteria:before{content:"\e059"}.fa-bacterium:before{content:"\e05a"}.fa-bahai:before{content:"\f666"}.fa-balance-scale:before{content:"\f24e"}.fa-balance-scale-left:before{content:"\f515"}.fa-balance-scale-right:before{content:"\f516"}.fa-ban:before{content:"\f05e"}.fa-band-aid:before{content:"\f462"}.fa-bandcamp:before{content:"\f2d5"}.fa-barcode:before{content:"\f02a"}.fa-bars:before{content:"\f0c9"}.fa-baseball-ball:before{content:"\f433"}.fa-basketball-ball:before{
content:"\f434"}.fa-bath:before{content:"\f2cd"}.fa-battery-empty:before{content:"\f244"}.fa-battery-full:before{content:"\f240"}.fa-battery-half:before{content:"\f242"}.fa-battery-quarter:before{content:"\f243"}.fa-battery-three-quarters:before{content:"\f241"}.fa-battle-net:before{content:"\f835"}.fa-bed:before{content:"\f236"}.fa-beer:before{content:"\f0fc"}.fa-behance:before{content:"\f1b4"}.fa-behance-square:before{content:"\f1b5"}.fa-bell:before{content:"\f0f3"}.fa-bell-slash:before{content:"\f1f6"}.fa-bezier-curve:before{content:"\f55b"}.fa-bible:before{content:"\f647"}.fa-bicycle:before{content:"\f206"}.fa-biking:before{content:"\f84a"}.fa-bimobject:before{content:"\f378"}.fa-binoculars:before{content:"\f1e5"}.fa-biohazard:before{content:"\f780"}.fa-birthday-cake:before{content:"\f1fd"}.fa-bitbucket:before{content:"\f171"}.fa-bitcoin:before{content:"\f379"}.fa-bity:before{content:"\f37a"}.fa-black-tie:before{content:"\f27e"}.fa-blackberry:before{content:"\f37b"}.fa-blender:before{content:"\f517"}.fa-blender-phone:before{content:"\f6b6"}.fa-blind:before{content:"\f29d"}.fa-blog:before{content:"\f781"}.fa-blogger:before{content:"\f37c"}.fa-blogger-b:before{content:"\f37d"}.fa-bluetooth:before{content:"\f293"}.fa-bluetooth-b:before{content:"\f294"}.fa-bold:before{content:"\f032"}.fa-bolt:before{content:"\f0e7"}.fa-bomb:before{content:"\f1e2"}.fa-bone:before{content:"\f5d7"}.fa-bong:before{content:"\f55c"}.fa-book:before{content:"\f02d"}.fa-book-dead:before{content:"\f6b7"}.fa-book-medical:before{content:"\f7e6"}.fa-book-open:before{content:"\f518"}.fa-book-reader:before{content:"\f5da"}.fa-bookmark:before{content:"\f02e"}.fa-bootstrap:before{content:"\f836"}.fa-border-all:before{content:"\f84c"}.fa-border-none:before{content:"\f850"}.fa-border-style:before{content:"\f853"}.fa-bowling-ball:before{content:"\f436"}.fa-box:before{content:"\f466"}.fa-box-open:before{content:"\f49e"}.fa-box-tissue:before{content:"\e05b"}.fa-boxes:before{content:"\f468"}.fa-braille:be
fore{content:"\f2a1"}.fa-brain:before{content:"\f5dc"}.fa-bread-slice:before{content:"\f7ec"}.fa-briefcase:before{content:"\f0b1"}.fa-briefcase-medical:before{content:"\f469"}.fa-broadcast-tower:before{content:"\f519"}.fa-broom:before{content:"\f51a"}.fa-brush:before{content:"\f55d"}.fa-btc:before{content:"\f15a"}.fa-buffer:before{content:"\f837"}.fa-bug:before{content:"\f188"}.fa-building:before{content:"\f1ad"}.fa-bullhorn:before{content:"\f0a1"}.fa-bullseye:before{content:"\f140"}.fa-burn:before{content:"\f46a"}.fa-buromobelexperte:before{content:"\f37f"}.fa-bus:before{content:"\f207"}.fa-bus-alt:before{content:"\f55e"}.fa-business-time:before{content:"\f64a"}.fa-buy-n-large:before{content:"\f8a6"}.fa-buysellads:before{content:"\f20d"}.fa-calculator:before{content:"\f1ec"}.fa-calendar:before{content:"\f133"}.fa-calendar-alt:before{content:"\f073"}.fa-calendar-check:before{content:"\f274"}.fa-calendar-day:before{content:"\f783"}.fa-calendar-minus:before{content:"\f272"}.fa-calendar-plus:before{content:"\f271"}.fa-calendar-times:before{content:"\f273"}.fa-calendar-week:before{content:"\f784"}.fa-camera:before{content:"\f030"}.fa-camera-retro:before{content:"\f083"}.fa-campground:before{content:"\f6bb"}.fa-canadian-maple-leaf:before{content:"\f785"}.fa-candy-cane:before{content:"\f786"}.fa-cannabis:before{content:"\f55f"}.fa-capsules:before{content:"\f46b"}.fa-car:before{content:"\f1b9"}.fa-car-alt:before{content:"\f5de"}.fa-car-battery:before{content:"\f5df"}.fa-car-crash:before{content:"\f5e1"}.fa-car-side:before{content:"\f5e4"}.fa-caravan:before{content:"\f8ff"}.fa-caret-down:before{content:"\f0d7"}.fa-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f0da"}.fa-caret-square-down:before{content:"\f150"}.fa-caret-square-left:before{content:"\f191"}.fa-caret-square-right:before{content:"\f152"}.fa-caret-square-up:before{content:"\f151"}.fa-caret-up:before{content:"\f0d8"}.fa-carrot:before{content:"\f787"}.fa-cart-arrow-down:before{content:"\f218"}.
fa-cart-plus:before{content:"\f217"}.fa-cash-register:before{content:"\f788"}.fa-cat:before{content:"\f6be"}.fa-cc-amazon-pay:before{content:"\f42d"}.fa-cc-amex:before{content:"\f1f3"}.fa-cc-apple-pay:before{content:"\f416"}.fa-cc-diners-club:before{content:"\f24c"}.fa-cc-discover:before{content:"\f1f2"}.fa-cc-jcb:before{content:"\f24b"}.fa-cc-mastercard:before{content:"\f1f1"}.fa-cc-paypal:before{content:"\f1f4"}.fa-cc-stripe:before{content:"\f1f5"}.fa-cc-visa:before{content:"\f1f0"}.fa-centercode:before{content:"\f380"}.fa-centos:before{content:"\f789"}.fa-certificate:before{content:"\f0a3"}.fa-chair:before{content:"\f6c0"}.fa-chalkboard:before{content:"\f51b"}.fa-chalkboard-teacher:before{content:"\f51c"}.fa-charging-station:before{content:"\f5e7"}.fa-chart-area:before{content:"\f1fe"}.fa-chart-bar:before{content:"\f080"}.fa-chart-line:before{content:"\f201"}.fa-chart-pie:before{content:"\f200"}.fa-check:before{content:"\f00c"}.fa-check-circle:before{content:"\f058"}.fa-check-double:before{content:"\f560"}.fa-check-square:before{content:"\f14a"}.fa-cheese:before{content:"\f7ef"}.fa-chess:before{content:"\f439"}.fa-chess-bishop:before{content:"\f43a"}.fa-chess-board:before{content:"\f43c"}.fa-chess-king:before{content:"\f43f"}.fa-chess-knight:before{content:"\f441"}.fa-chess-pawn:before{content:"\f443"}.fa-chess-queen:before{content:"\f445"}.fa-chess-rook:before{content:"\f447"}.fa-chevron-circle-down:before{content:"\f13a"}.fa-chevron-circle-left:before{content:"\f137"}.fa-chevron-circle-right:before{content:"\f138"}.fa-chevron-circle-up:before{content:"\f139"}.fa-chevron-down:before{content:"\f078"}.fa-chevron-left:before{content:"\f053"}.fa-chevron-right:before{content:"\f054"}.fa-chevron-up:before{content:"\f077"}.fa-child:before{content:"\f1ae"}.fa-chrome:before{content:"\f268"}.fa-chromecast:before{content:"\f838"}.fa-church:before{content:"\f51d"}.fa-circle:before{content:"\f111"}.fa-circle-notch:before{content:"\f1ce"}.fa-city:before{content:"\f64f"}.fa-cl
inic-medical:before{content:"\f7f2"}.fa-clipboard:before{content:"\f328"}.fa-clipboard-check:before{content:"\f46c"}.fa-clipboard-list:before{content:"\f46d"}.fa-clock:before{content:"\f017"}.fa-clone:before{content:"\f24d"}.fa-closed-captioning:before{content:"\f20a"}.fa-cloud:before{content:"\f0c2"}.fa-cloud-download-alt:before{content:"\f381"}.fa-cloud-meatball:before{content:"\f73b"}.fa-cloud-moon:before{content:"\f6c3"}.fa-cloud-moon-rain:before{content:"\f73c"}.fa-cloud-rain:before{content:"\f73d"}.fa-cloud-showers-heavy:before{content:"\f740"}.fa-cloud-sun:before{content:"\f6c4"}.fa-cloud-sun-rain:before{content:"\f743"}.fa-cloud-upload-alt:before{content:"\f382"}.fa-cloudflare:before{content:"\e07d"}.fa-cloudscale:before{content:"\f383"}.fa-cloudsmith:before{content:"\f384"}.fa-cloudversify:before{content:"\f385"}.fa-cocktail:before{content:"\f561"}.fa-code:before{content:"\f121"}.fa-code-branch:before{content:"\f126"}.fa-codepen:before{content:"\f1cb"}.fa-codiepie:before{content:"\f284"}.fa-coffee:before{content:"\f0f4"}.fa-cog:before{content:"\f013"}.fa-cogs:before{content:"\f085"}.fa-coins:before{content:"\f51e"}.fa-columns:before{content:"\f0db"}.fa-comment:before{content:"\f075"}.fa-comment-alt:before{content:"\f27a"}.fa-comment-dollar:before{content:"\f651"}.fa-comment-dots:before{content:"\f4ad"}.fa-comment-medical:before{content:"\f7f5"}.fa-comment-slash:before{content:"\f4b3"}.fa-comments:before{content:"\f086"}.fa-comments-dollar:before{content:"\f653"}.fa-compact-disc:before{content:"\f51f"}.fa-compass:before{content:"\f14e"}.fa-compress:before{content:"\f066"}.fa-compress-alt:before{content:"\f422"}.fa-compress-arrows-alt:before{content:"\f78c"}.fa-concierge-bell:before{content:"\f562"}.fa-confluence:before{content:"\f78d"}.fa-connectdevelop:before{content:"\f20e"}.fa-contao:before{content:"\f26d"}.fa-cookie:before{content:"\f563"}.fa-cookie-bite:before{content:"\f564"}.fa-copy:before{content:"\f0c5"}.fa-copyright:before{content:"\f1f9"}.fa-cotto
n-bureau:before{content:"\f89e"}.fa-couch:before{content:"\f4b8"}.fa-cpanel:before{content:"\f388"}.fa-creative-commons:before{content:"\f25e"}.fa-creative-commons-by:before{content:"\f4e7"}.fa-creative-commons-nc:before{content:"\f4e8"}.fa-creative-commons-nc-eu:before{content:"\f4e9"}.fa-creative-commons-nc-jp:before{content:"\f4ea"}.fa-creative-commons-nd:before{content:"\f4eb"}.fa-creative-commons-pd:before{content:"\f4ec"}.fa-creative-commons-pd-alt:before{content:"\f4ed"}.fa-creative-commons-remix:before{content:"\f4ee"}.fa-creative-commons-sa:before{content:"\f4ef"}.fa-creative-commons-sampling:before{content:"\f4f0"}.fa-creative-commons-sampling-plus:before{content:"\f4f1"}.fa-creative-commons-share:before{content:"\f4f2"}.fa-creative-commons-zero:before{content:"\f4f3"}.fa-credit-card:before{content:"\f09d"}.fa-critical-role:before{content:"\f6c9"}.fa-crop:before{content:"\f125"}.fa-crop-alt:before{content:"\f565"}.fa-cross:before{content:"\f654"}.fa-crosshairs:before{content:"\f05b"}.fa-crow:before{content:"\f520"}.fa-crown:before{content:"\f521"}.fa-crutch:before{content:"\f7f7"}.fa-css3:before{content:"\f13c"}.fa-css3-alt:before{content:"\f38b"}.fa-cube:before{content:"\f1b2"}.fa-cubes:before{content:"\f1b3"}.fa-cut:before{content:"\f0c4"}.fa-cuttlefish:before{content:"\f38c"}.fa-d-and-d:before{content:"\f38d"}.fa-d-and-d-beyond:before{content:"\f6ca"}.fa-dailymotion:before{content:"\e052"}.fa-dashcube:before{content:"\f210"}.fa-database:before{content:"\f1c0"}.fa-deaf:before{content:"\f2a4"}.fa-deezer:before{content:"\e077"}.fa-delicious:before{content:"\f1a5"}.fa-democrat:before{content:"\f747"}.fa-deploydog:before{content:"\f38e"}.fa-deskpro:before{content:"\f38f"}.fa-desktop:before{content:"\f108"}.fa-dev:before{content:"\f6cc"}.fa-deviantart:before{content:"\f1bd"}.fa-dharmachakra:before{content:"\f655"}.fa-dhl:before{content:"\f790"}.fa-diagnoses:before{content:"\f470"}.fa-diaspora:before{content:"\f791"}.fa-dice:before{content:"\f522"}.fa-dice-d20
:before{content:"\f6cf"}.fa-dice-d6:before{content:"\f6d1"}.fa-dice-five:before{content:"\f523"}.fa-dice-four:before{content:"\f524"}.fa-dice-one:before{content:"\f525"}.fa-dice-six:before{content:"\f526"}.fa-dice-three:before{content:"\f527"}.fa-dice-two:before{content:"\f528"}.fa-digg:before{content:"\f1a6"}.fa-digital-ocean:before{content:"\f391"}.fa-digital-tachograph:before{content:"\f566"}.fa-directions:before{content:"\f5eb"}.fa-discord:before{content:"\f392"}.fa-discourse:before{content:"\f393"}.fa-disease:before{content:"\f7fa"}.fa-divide:before{content:"\f529"}.fa-dizzy:before{content:"\f567"}.fa-dna:before{content:"\f471"}.fa-dochub:before{content:"\f394"}.fa-docker:before{content:"\f395"}.fa-dog:before{content:"\f6d3"}.fa-dollar-sign:before{content:"\f155"}.fa-dolly:before{content:"\f472"}.fa-dolly-flatbed:before{content:"\f474"}.fa-donate:before{content:"\f4b9"}.fa-door-closed:before{content:"\f52a"}.fa-door-open:before{content:"\f52b"}.fa-dot-circle:before{content:"\f192"}.fa-dove:before{content:"\f4ba"}.fa-download:before{content:"\f019"}.fa-draft2digital:before{content:"\f396"}.fa-drafting-compass:before{content:"\f568"}.fa-dragon:before{content:"\f6d5"}.fa-draw-polygon:before{content:"\f5ee"}.fa-dribbble:before{content:"\f17d"}.fa-dribbble-square:before{content:"\f397"}.fa-dropbox:before{content:"\f16b"}.fa-drum:before{content:"\f569"}.fa-drum-steelpan:before{content:"\f56a"}.fa-drumstick-bite:before{content:"\f6d7"}.fa-drupal:before{content:"\f1a9"}.fa-dumbbell:before{content:"\f44b"}.fa-dumpster:before{content:"\f793"}.fa-dumpster-fire:before{content:"\f794"}.fa-dungeon:before{content:"\f6d9"}.fa-dyalog:before{content:"\f399"}.fa-earlybirds:before{content:"\f39a"}.fa-ebay:before{content:"\f4f4"}.fa-edge:before{content:"\f282"}.fa-edge-legacy:before{content:"\e078"}.fa-edit:before{content:"\f044"}.fa-egg:before{content:"\f7fb"}.fa-eject:before{content:"\f052"}.fa-elementor:before{content:"\f430"}.fa-ellipsis-h:before{content:"\f141"}.fa-ellipsis-v:
before{content:"\f142"}.fa-ello:before{content:"\f5f1"}.fa-ember:before{content:"\f423"}.fa-empire:before{content:"\f1d1"}.fa-envelope:before{content:"\f0e0"}.fa-envelope-open:before{content:"\f2b6"}.fa-envelope-open-text:before{content:"\f658"}.fa-envelope-square:before{content:"\f199"}.fa-envira:before{content:"\f299"}.fa-equals:before{content:"\f52c"}.fa-eraser:before{content:"\f12d"}.fa-erlang:before{content:"\f39d"}.fa-ethereum:before{content:"\f42e"}.fa-ethernet:before{content:"\f796"}.fa-etsy:before{content:"\f2d7"}.fa-euro-sign:before{content:"\f153"}.fa-evernote:before{content:"\f839"}.fa-exchange-alt:before{content:"\f362"}.fa-exclamation:before{content:"\f12a"}.fa-exclamation-circle:before{content:"\f06a"}.fa-exclamation-triangle:before{content:"\f071"}.fa-expand:before{content:"\f065"}.fa-expand-alt:before{content:"\f424"}.fa-expand-arrows-alt:before{content:"\f31e"}.fa-expeditedssl:before{content:"\f23e"}.fa-external-link-alt:before{content:"\f35d"}.fa-external-link-square-alt:before{content:"\f360"}.fa-eye:before{content:"\f06e"}.fa-eye-dropper:before{content:"\f1fb"}.fa-eye-slash:before{content:"\f070"}.fa-facebook:before{content:"\f09a"}.fa-facebook-f:before{content:"\f39e"}.fa-facebook-messenger:before{content:"\f39f"}.fa-facebook-square:before{content:"\f082"}.fa-fan:before{content:"\f863"}.fa-fantasy-flight-games:before{content:"\f6dc"}.fa-fast-backward:before{content:"\f049"}.fa-fast-forward:before{content:"\f050"}.fa-faucet:before{content:"\e005"}.fa-fax:before{content:"\f1ac"}.fa-feather:before{content:"\f52d"}.fa-feather-alt:before{content:"\f56b"}.fa-fedex:before{content:"\f797"}.fa-fedora:before{content:"\f798"}.fa-female:before{content:"\f182"}.fa-fighter-jet:before{content:"\f0fb"}.fa-figma:before{content:"\f799"}.fa-file:before{content:"\f15b"}.fa-file-alt:before{content:"\f15c"}.fa-file-archive:before{content:"\f1c6"}.fa-file-audio:before{content:"\f1c7"}.fa-file-code:before{content:"\f1c9"}.fa-file-contract:before{content:"\f56c"}.fa-fi
le-csv:before{content:"\f6dd"}.fa-file-download:before{content:"\f56d"}.fa-file-excel:before{content:"\f1c3"}.fa-file-export:before{content:"\f56e"}.fa-file-image:before{content:"\f1c5"}.fa-file-import:before{content:"\f56f"}.fa-file-invoice:before{content:"\f570"}.fa-file-invoice-dollar:before{content:"\f571"}.fa-file-medical:before{content:"\f477"}.fa-file-medical-alt:before{content:"\f478"}.fa-file-pdf:before{content:"\f1c1"}.fa-file-powerpoint:before{content:"\f1c4"}.fa-file-prescription:before{content:"\f572"}.fa-file-signature:before{content:"\f573"}.fa-file-upload:before{content:"\f574"}.fa-file-video:before{content:"\f1c8"}.fa-file-word:before{content:"\f1c2"}.fa-fill:before{content:"\f575"}.fa-fill-drip:before{content:"\f576"}.fa-film:before{content:"\f008"}.fa-filter:before{content:"\f0b0"}.fa-fingerprint:before{content:"\f577"}.fa-fire:before{content:"\f06d"}.fa-fire-alt:before{content:"\f7e4"}.fa-fire-extinguisher:before{content:"\f134"}.fa-firefox:before{content:"\f269"}.fa-firefox-browser:before{content:"\e007"}.fa-first-aid:before{content:"\f479"}.fa-first-order:before{content:"\f2b0"}.fa-first-order-alt:before{content:"\f50a"}.fa-firstdraft:before{content:"\f3a1"}.fa-fish:before{content:"\f578"}.fa-fist-raised:before{content:"\f6de"}.fa-flag:before{content:"\f024"}.fa-flag-checkered:before{content:"\f11e"}.fa-flag-usa:before{content:"\f74d"}.fa-flask:before{content:"\f0c3"}.fa-flickr:before{content:"\f16e"}.fa-flipboard:before{content:"\f44d"}.fa-flushed:before{content:"\f579"}.fa-fly:before{content:"\f417"}.fa-folder:before{content:"\f07b"}.fa-folder-minus:before{content:"\f65d"}.fa-folder-open:before{content:"\f07c"}.fa-folder-plus:before{content:"\f65e"}.fa-font:before{content:"\f031"}.fa-font-awesome:before{content:"\f2b4"}.fa-font-awesome-alt:before{content:"\f35c"}.fa-font-awesome-flag:before{content:"\f425"}.fa-font-awesome-logo-full:before{content:"\f4e6"}.fa-fonticons:before{content:"\f280"}.fa-fonticons-fi:before{content:"\f3a2"}.fa-footbal
l-ball:before{content:"\f44e"}.fa-fort-awesome:before{content:"\f286"}.fa-fort-awesome-alt:before{content:"\f3a3"}.fa-forumbee:before{content:"\f211"}.fa-forward:before{content:"\f04e"}.fa-foursquare:before{content:"\f180"}.fa-free-code-camp:before{content:"\f2c5"}.fa-freebsd:before{content:"\f3a4"}.fa-frog:before{content:"\f52e"}.fa-frown:before{content:"\f119"}.fa-frown-open:before{content:"\f57a"}.fa-fulcrum:before{content:"\f50b"}.fa-funnel-dollar:before{content:"\f662"}.fa-futbol:before{content:"\f1e3"}.fa-galactic-republic:before{content:"\f50c"}.fa-galactic-senate:before{content:"\f50d"}.fa-gamepad:before{content:"\f11b"}.fa-gas-pump:before{content:"\f52f"}.fa-gavel:before{content:"\f0e3"}.fa-gem:before{content:"\f3a5"}.fa-genderless:before{content:"\f22d"}.fa-get-pocket:before{content:"\f265"}.fa-gg:before{content:"\f260"}.fa-gg-circle:before{content:"\f261"}.fa-ghost:before{content:"\f6e2"}.fa-gift:before{content:"\f06b"}.fa-gifts:before{content:"\f79c"}.fa-git:before{content:"\f1d3"}.fa-git-alt:before{content:"\f841"}.fa-git-square:before{content:"\f1d2"}.fa-github:before{content:"\f09b"}.fa-github-alt:before{content:"\f113"}.fa-github-square:before{content:"\f092"}.fa-gitkraken:before{content:"\f3a6"}.fa-gitlab:before{content:"\f296"}.fa-gitter:before{content:"\f426"}.fa-glass-cheers:before{content:"\f79f"}.fa-glass-martini:before{content:"\f000"}.fa-glass-martini-alt:before{content:"\f57b"}.fa-glass-whiskey:before{content:"\f7a0"}.fa-glasses:before{content:"\f530"}.fa-glide:before{content:"\f2a5"}.fa-glide-g:before{content:"\f2a6"}.fa-globe:before{content:"\f0ac"}.fa-globe-africa:before{content:"\f57c"}.fa-globe-americas:before{content:"\f57d"}.fa-globe-asia:before{content:"\f57e"}.fa-globe-europe:before{content:"\f7a2"}.fa-gofore:before{content:"\f3a7"}.fa-golf-ball:before{content:"\f450"}.fa-goodreads:before{content:"\f3a8"}.fa-goodreads-g:before{content:"\f3a9"}.fa-google:before{content:"\f1a0"}.fa-google-drive:before{content:"\f3aa"}.fa-google-pay:be
fore{content:"\e079"}.fa-google-play:before{content:"\f3ab"}.fa-google-plus:before{content:"\f2b3"}.fa-google-plus-g:before{content:"\f0d5"}.fa-google-plus-square:before{content:"\f0d4"}.fa-google-wallet:before{content:"\f1ee"}.fa-gopuram:before{content:"\f664"}.fa-graduation-cap:before{content:"\f19d"}.fa-gratipay:before{content:"\f184"}.fa-grav:before{content:"\f2d6"}.fa-greater-than:before{content:"\f531"}.fa-greater-than-equal:before{content:"\f532"}.fa-grimace:before{content:"\f57f"}.fa-grin:before{content:"\f580"}.fa-grin-alt:before{content:"\f581"}.fa-grin-beam:before{content:"\f582"}.fa-grin-beam-sweat:before{content:"\f583"}.fa-grin-hearts:before{content:"\f584"}.fa-grin-squint:before{content:"\f585"}.fa-grin-squint-tears:before{content:"\f586"}.fa-grin-stars:before{content:"\f587"}.fa-grin-tears:before{content:"\f588"}.fa-grin-tongue:before{content:"\f589"}.fa-grin-tongue-squint:before{content:"\f58a"}.fa-grin-tongue-wink:before{content:"\f58b"}.fa-grin-wink:before{content:"\f58c"}.fa-grip-horizontal:before{content:"\f58d"}.fa-grip-lines:before{content:"\f7a4"}.fa-grip-lines-vertical:before{content:"\f7a5"}.fa-grip-vertical:before{content:"\f58e"}.fa-gripfire:before{content:"\f3ac"}.fa-grunt:before{content:"\f3ad"}.fa-guilded:before{content:"\e07e"}.fa-guitar:before{content:"\f7a6"}.fa-gulp:before{content:"\f3ae"}.fa-h-square:before{content:"\f0fd"}.fa-hacker-news:before{content:"\f1d4"}.fa-hacker-news-square:before{content:"\f3af"}.fa-hackerrank:before{content:"\f5f7"}.fa-hamburger:before{content:"\f805"}.fa-hammer:before{content:"\f6e3"}.fa-hamsa:before{content:"\f665"}.fa-hand-holding:before{content:"\f4bd"}.fa-hand-holding-heart:before{content:"\f4be"}.fa-hand-holding-medical:before{content:"\e05c"}.fa-hand-holding-usd:before{content:"\f4c0"}.fa-hand-holding-water:before{content:"\f4c1"}.fa-hand-lizard:before{content:"\f258"}.fa-hand-middle-finger:before{content:"\f806"}.fa-hand-paper:before{content:"\f256"}.fa-hand-peace:before{content:"\f25b"}.fa-han
d-point-down:before{content:"\f0a7"}.fa-hand-point-left:before{content:"\f0a5"}.fa-hand-point-right:before{content:"\f0a4"}.fa-hand-point-up:before{content:"\f0a6"}.fa-hand-pointer:before{content:"\f25a"}.fa-hand-rock:before{content:"\f255"}.fa-hand-scissors:before{content:"\f257"}.fa-hand-sparkles:before{content:"\e05d"}.fa-hand-spock:before{content:"\f259"}.fa-hands:before{content:"\f4c2"}.fa-hands-helping:before{content:"\f4c4"}.fa-hands-wash:before{content:"\e05e"}.fa-handshake:before{content:"\f2b5"}.fa-handshake-alt-slash:before{content:"\e05f"}.fa-handshake-slash:before{content:"\e060"}.fa-hanukiah:before{content:"\f6e6"}.fa-hard-hat:before{content:"\f807"}.fa-hashtag:before{content:"\f292"}.fa-hat-cowboy:before{content:"\f8c0"}.fa-hat-cowboy-side:before{content:"\f8c1"}.fa-hat-wizard:before{content:"\f6e8"}.fa-hdd:before{content:"\f0a0"}.fa-head-side-cough:before{content:"\e061"}.fa-head-side-cough-slash:before{content:"\e062"}.fa-head-side-mask:before{content:"\e063"}.fa-head-side-virus:before{content:"\e064"}.fa-heading:before{content:"\f1dc"}.fa-headphones:before{content:"\f025"}.fa-headphones-alt:before{content:"\f58f"}.fa-headset:before{content:"\f590"}.fa-heart:before{content:"\f004"}.fa-heart-broken:before{content:"\f7a9"}.fa-heartbeat:before{content:"\f21e"}.fa-helicopter:before{content:"\f533"}.fa-highlighter:before{content:"\f591"}.fa-hiking:before{content:"\f6ec"}.fa-hippo:before{content:"\f6ed"}.fa-hips:before{content:"\f452"}.fa-hire-a-helper:before{content:"\f3b0"}.fa-history:before{content:"\f1da"}.fa-hive:before{content:"\e07f"}.fa-hockey-puck:before{content:"\f453"}.fa-holly-berry:before{content:"\f7aa"}.fa-home:before{content:"\f015"}.fa-hooli:before{content:"\f427"}.fa-hornbill:before{content:"\f592"}.fa-horse:before{content:"\f6f0"}.fa-horse-head:before{content:"\f7ab"}.fa-hospital:before{content:"\f0f8"}.fa-hospital-alt:before{content:"\f47d"}.fa-hospital-symbol:before{content:"\f47e"}.fa-hospital-user:before{content:"\f80d"}.fa-hot-tub:
before{content:"\f593"}.fa-hotdog:before{content:"\f80f"}.fa-hotel:before{content:"\f594"}.fa-hotjar:before{content:"\f3b1"}.fa-hourglass:before{content:"\f254"}.fa-hourglass-end:before{content:"\f253"}.fa-hourglass-half:before{content:"\f252"}.fa-hourglass-start:before{content:"\f251"}.fa-house-damage:before{content:"\f6f1"}.fa-house-user:before{content:"\e065"}.fa-houzz:before{content:"\f27c"}.fa-hryvnia:before{content:"\f6f2"}.fa-html5:before{content:"\f13b"}.fa-hubspot:before{content:"\f3b2"}.fa-i-cursor:before{content:"\f246"}.fa-ice-cream:before{content:"\f810"}.fa-icicles:before{content:"\f7ad"}.fa-icons:before{content:"\f86d"}.fa-id-badge:before{content:"\f2c1"}.fa-id-card:before{content:"\f2c2"}.fa-id-card-alt:before{content:"\f47f"}.fa-ideal:before{content:"\e013"}.fa-igloo:before{content:"\f7ae"}.fa-image:before{content:"\f03e"}.fa-images:before{content:"\f302"}.fa-imdb:before{content:"\f2d8"}.fa-inbox:before{content:"\f01c"}.fa-indent:before{content:"\f03c"}.fa-industry:before{content:"\f275"}.fa-infinity:before{content:"\f534"}.fa-info:before{content:"\f129"}.fa-info-circle:before{content:"\f05a"}.fa-innosoft:before{content:"\e080"}.fa-instagram:before{content:"\f16d"}.fa-instagram-square:before{content:"\e055"}.fa-instalod:before{content:"\e081"}.fa-intercom:before{content:"\f7af"}.fa-internet-explorer:before{content:"\f26b"}.fa-invision:before{content:"\f7b0"}.fa-ioxhost:before{content:"\f208"}.fa-italic:before{content:"\f033"}.fa-itch-io:before{content:"\f83a"}.fa-itunes:before{content:"\f3b4"}.fa-itunes-note:before{content:"\f3b5"}.fa-java:before{content:"\f4e4"}.fa-jedi:before{content:"\f669"}.fa-jedi-order:before{content:"\f50e"}.fa-jenkins:before{content:"\f3b6"}.fa-jira:before{content:"\f7b1"}.fa-joget:before{content:"\f3b7"}.fa-joint:before{content:"\f595"}.fa-joomla:before{content:"\f1aa"}.fa-journal-whills:before{content:"\f66a"}.fa-js:before{content:"\f3b8"}.fa-js-square:before{content:"\f3b9"}.fa-jsfiddle:before{content:"\f1cc"}.fa-kaaba:be
fore{content:"\f66b"}.fa-kaggle:before{content:"\f5fa"}.fa-key:before{content:"\f084"}.fa-keybase:before{content:"\f4f5"}.fa-keyboard:before{content:"\f11c"}.fa-keycdn:before{content:"\f3ba"}.fa-khanda:before{content:"\f66d"}.fa-kickstarter:before{content:"\f3bb"}.fa-kickstarter-k:before{content:"\f3bc"}.fa-kiss:before{content:"\f596"}.fa-kiss-beam:before{content:"\f597"}.fa-kiss-wink-heart:before{content:"\f598"}.fa-kiwi-bird:before{content:"\f535"}.fa-korvue:before{content:"\f42f"}.fa-landmark:before{content:"\f66f"}.fa-language:before{content:"\f1ab"}.fa-laptop:before{content:"\f109"}.fa-laptop-code:before{content:"\f5fc"}.fa-laptop-house:before{content:"\e066"}.fa-laptop-medical:before{content:"\f812"}.fa-laravel:before{content:"\f3bd"}.fa-lastfm:before{content:"\f202"}.fa-lastfm-square:before{content:"\f203"}.fa-laugh:before{content:"\f599"}.fa-laugh-beam:before{content:"\f59a"}.fa-laugh-squint:before{content:"\f59b"}.fa-laugh-wink:before{content:"\f59c"}.fa-layer-group:before{content:"\f5fd"}.fa-leaf:before{content:"\f06c"}.fa-leanpub:before{content:"\f212"}.fa-lemon:before{content:"\f094"}.fa-less:before{content:"\f41d"}.fa-less-than:before{content:"\f536"}.fa-less-than-equal:before{content:"\f537"}.fa-level-down-alt:before{content:"\f3be"}.fa-level-up-alt:before{content:"\f3bf"}.fa-life-ring:before{content:"\f1cd"}.fa-lightbulb:before{content:"\f0eb"}.fa-line:before{content:"\f3c0"}.fa-link:before{content:"\f0c1"}.fa-linkedin:before{content:"\f08c"}.fa-linkedin-in:before{content:"\f0e1"}.fa-linode:before{content:"\f2b8"}.fa-linux:before{content:"\f17c"}.fa-lira-sign:before{content:"\f195"}.fa-list:before{content:"\f03a"}.fa-list-alt:before{content:"\f022"}.fa-list-ol:before{content:"\f0cb"}.fa-list-ul:before{content:"\f0ca"}.fa-location-arrow:before{content:"\f124"}.fa-lock:before{content:"\f023"}.fa-lock-open:before{content:"\f3c1"}.fa-long-arrow-alt-down:before{content:"\f309"}.fa-long-arrow-alt-left:before{content:"\f30a"}.fa-long-arrow-alt-right:before{c
ontent:"\f30b"}.fa-long-arrow-alt-up:before{content:"\f30c"}.fa-low-vision:before{content:"\f2a8"}.fa-luggage-cart:before{content:"\f59d"}.fa-lungs:before{content:"\f604"}.fa-lungs-virus:before{content:"\e067"}.fa-lyft:before{content:"\f3c3"}.fa-magento:before{content:"\f3c4"}.fa-magic:before{content:"\f0d0"}.fa-magnet:before{content:"\f076"}.fa-mail-bulk:before{content:"\f674"}.fa-mailchimp:before{content:"\f59e"}.fa-male:before{content:"\f183"}.fa-mandalorian:before{content:"\f50f"}.fa-map:before{content:"\f279"}.fa-map-marked:before{content:"\f59f"}.fa-map-marked-alt:before{content:"\f5a0"}.fa-map-marker:before{content:"\f041"}.fa-map-marker-alt:before{content:"\f3c5"}.fa-map-pin:before{content:"\f276"}.fa-map-signs:before{content:"\f277"}.fa-markdown:before{content:"\f60f"}.fa-marker:before{content:"\f5a1"}.fa-mars:before{content:"\f222"}.fa-mars-double:before{content:"\f227"}.fa-mars-stroke:before{content:"\f229"}.fa-mars-stroke-h:before{content:"\f22b"}.fa-mars-stroke-v:before{content:"\f22a"}.fa-mask:before{content:"\f6fa"}.fa-mastodon:before{content:"\f4f6"}.fa-maxcdn:before{content:"\f136"}.fa-mdb:before{content:"\f8ca"}.fa-medal:before{content:"\f5a2"}.fa-medapps:before{content:"\f3c6"}.fa-medium:before{content:"\f23a"}.fa-medium-m:before{content:"\f3c7"}.fa-medkit:before{content:"\f0fa"}.fa-medrt:before{content:"\f3c8"}.fa-meetup:before{content:"\f2e0"}.fa-megaport:before{content:"\f5a3"}.fa-meh:before{content:"\f11a"}.fa-meh-blank:before{content:"\f5a4"}.fa-meh-rolling-eyes:before{content:"\f5a5"}.fa-memory:before{content:"\f538"}.fa-mendeley:before{content:"\f7b3"}.fa-menorah:before{content:"\f676"}.fa-mercury:before{content:"\f223"}.fa-meteor:before{content:"\f753"}.fa-microblog:before{content:"\e01a"}.fa-microchip:before{content:"\f2db"}.fa-microphone:before{content:"\f130"}.fa-microphone-alt:before{content:"\f3c9"}.fa-microphone-alt-slash:before{content:"\f539"}.fa-microphone-slash:before{content:"\f131"}.fa-microscope:before{content:"\f610"}.fa-micr
osoft:before{content:"\f3ca"}.fa-minus:before{content:"\f068"}.fa-minus-circle:before{content:"\f056"}.fa-minus-square:before{content:"\f146"}.fa-mitten:before{content:"\f7b5"}.fa-mix:before{content:"\f3cb"}.fa-mixcloud:before{content:"\f289"}.fa-mixer:before{content:"\e056"}.fa-mizuni:before{content:"\f3cc"}.fa-mobile:before{content:"\f10b"}.fa-mobile-alt:before{content:"\f3cd"}.fa-modx:before{content:"\f285"}.fa-monero:before{content:"\f3d0"}.fa-money-bill:before{content:"\f0d6"}.fa-money-bill-alt:before{content:"\f3d1"}.fa-money-bill-wave:before{content:"\f53a"}.fa-money-bill-wave-alt:before{content:"\f53b"}.fa-money-check:before{content:"\f53c"}.fa-money-check-alt:before{content:"\f53d"}.fa-monument:before{content:"\f5a6"}.fa-moon:before{content:"\f186"}.fa-mortar-pestle:before{content:"\f5a7"}.fa-mosque:before{content:"\f678"}.fa-motorcycle:before{content:"\f21c"}.fa-mountain:before{content:"\f6fc"}.fa-mouse:before{content:"\f8cc"}.fa-mouse-pointer:before{content:"\f245"}.fa-mug-hot:before{content:"\f7b6"}.fa-music:before{content:"\f001"}.fa-napster:before{content:"\f3d2"}.fa-neos:before{content:"\f612"}.fa-network-wired:before{content:"\f6ff"}.fa-neuter:before{content:"\f22c"}.fa-newspaper:before{content:"\f1ea"}.fa-nimblr:before{content:"\f5a8"}.fa-node:before{content:"\f419"}.fa-node-js:before{content:"\f3d3"}.fa-not-equal:before{content:"\f53e"}.fa-notes-medical:before{content:"\f481"}.fa-npm:before{content:"\f3d4"}.fa-ns8:before{content:"\f3d5"}.fa-nutritionix:before{content:"\f3d6"}.fa-object-group:before{content:"\f247"}.fa-object-ungroup:before{content:"\f248"}.fa-octopus-deploy:before{content:"\e082"}.fa-odnoklassniki:before{content:"\f263"}.fa-odnoklassniki-square:before{content:"\f264"}.fa-oil-can:before{content:"\f613"}.fa-old-republic:before{content:"\f510"}.fa-om:before{content:"\f679"}.fa-opencart:before{content:"\f23d"}.fa-openid:before{content:"\f19b"}.fa-opera:before{content:"\f26a"}.fa-optin-monster:before{content:"\f23c"}.fa-orcid:before{con
tent:"\f8d2"}.fa-osi:before{content:"\f41a"}.fa-otter:before{content:"\f700"}.fa-outdent:before{content:"\f03b"}.fa-page4:before{content:"\f3d7"}.fa-pagelines:before{content:"\f18c"}.fa-pager:before{content:"\f815"}.fa-paint-brush:before{content:"\f1fc"}.fa-paint-roller:before{content:"\f5aa"}.fa-palette:before{content:"\f53f"}.fa-palfed:before{content:"\f3d8"}.fa-pallet:before{content:"\f482"}.fa-paper-plane:before{content:"\f1d8"}.fa-paperclip:before{content:"\f0c6"}.fa-parachute-box:before{content:"\f4cd"}.fa-paragraph:before{content:"\f1dd"}.fa-parking:before{content:"\f540"}.fa-passport:before{content:"\f5ab"}.fa-pastafarianism:before{content:"\f67b"}.fa-paste:before{content:"\f0ea"}.fa-patreon:before{content:"\f3d9"}.fa-pause:before{content:"\f04c"}.fa-pause-circle:before{content:"\f28b"}.fa-paw:before{content:"\f1b0"}.fa-paypal:before{content:"\f1ed"}.fa-peace:before{content:"\f67c"}.fa-pen:before{content:"\f304"}.fa-pen-alt:before{content:"\f305"}.fa-pen-fancy:before{content:"\f5ac"}.fa-pen-nib:before{content:"\f5ad"}.fa-pen-square:before{content:"\f14b"}.fa-pencil-alt:before{content:"\f303"}.fa-pencil-ruler:before{content:"\f5ae"}.fa-penny-arcade:before{content:"\f704"}.fa-people-arrows:before{content:"\e068"}.fa-people-carry:before{content:"\f4ce"}.fa-pepper-hot:before{content:"\f816"}.fa-perbyte:before{content:"\e083"}.fa-percent:before{content:"\f295"}.fa-percentage:before{content:"\f541"}.fa-periscope:before{content:"\f3da"}.fa-person-booth:before{content:"\f756"}.fa-phabricator:before{content:"\f3db"}.fa-phoenix-framework:before{content:"\f3dc"}.fa-phoenix-squadron:before{content:"\f511"}.fa-phone:before{content:"\f095"}.fa-phone-alt:before{content:"\f879"}.fa-phone-slash:before{content:"\f3dd"}.fa-phone-square:before{content:"\f098"}.fa-phone-square-alt:before{content:"\f87b"}.fa-phone-volume:before{content:"\f2a0"}.fa-photo-video:before{content:"\f87c"}.fa-php:before{content:"\f457"}.fa-pied-piper:before{content:"\f2ae"}.fa-pied-piper-alt:before{cont
ent:"\f1a8"}.fa-pied-piper-hat:before{content:"\f4e5"}.fa-pied-piper-pp:before{content:"\f1a7"}.fa-pied-piper-square:before{content:"\e01e"}.fa-piggy-bank:before{content:"\f4d3"}.fa-pills:before{content:"\f484"}.fa-pinterest:before{content:"\f0d2"}.fa-pinterest-p:before{content:"\f231"}.fa-pinterest-square:before{content:"\f0d3"}.fa-pizza-slice:before{content:"\f818"}.fa-place-of-worship:before{content:"\f67f"}.fa-plane:before{content:"\f072"}.fa-plane-arrival:before{content:"\f5af"}.fa-plane-departure:before{content:"\f5b0"}.fa-plane-slash:before{content:"\e069"}.fa-play:before{content:"\f04b"}.fa-play-circle:before{content:"\f144"}.fa-playstation:before{content:"\f3df"}.fa-plug:before{content:"\f1e6"}.fa-plus:before{content:"\f067"}.fa-plus-circle:before{content:"\f055"}.fa-plus-square:before{content:"\f0fe"}.fa-podcast:before{content:"\f2ce"}.fa-poll:before{content:"\f681"}.fa-poll-h:before{content:"\f682"}.fa-poo:before{content:"\f2fe"}.fa-poo-storm:before{content:"\f75a"}.fa-poop:before{content:"\f619"}.fa-portrait:before{content:"\f3e0"}.fa-pound-sign:before{content:"\f154"}.fa-power-off:before{content:"\f011"}.fa-pray:before{content:"\f683"}.fa-praying-hands:before{content:"\f684"}.fa-prescription:before{content:"\f5b1"}.fa-prescription-bottle:before{content:"\f485"}.fa-prescription-bottle-alt:before{content:"\f486"}.fa-print:before{content:"\f02f"}.fa-procedures:before{content:"\f487"}.fa-product-hunt:before{content:"\f288"}.fa-project-diagram:before{content:"\f542"}.fa-pump-medical:before{content:"\e06a"}.fa-pump-soap:before{content:"\e06b"}.fa-pushed:before{content:"\f3e1"}.fa-puzzle-piece:before{content:"\f12e"}.fa-python:before{content:"\f3e2"}.fa-qq:before{content:"\f1d6"}.fa-qrcode:before{content:"\f029"}.fa-question:before{content:"\f128"}.fa-question-circle:before{content:"\f059"}.fa-quidditch:before{content:"\f458"}.fa-quinscape:before{content:"\f459"}.fa-quora:before{content:"\f2c4"}.fa-quote-left:before{content:"\f10d"}.fa-quote-right:before{conte
nt:"\f10e"}.fa-quran:before{content:"\f687"}.fa-r-project:before{content:"\f4f7"}.fa-radiation:before{content:"\f7b9"}.fa-radiation-alt:before{content:"\f7ba"}.fa-rainbow:before{content:"\f75b"}.fa-random:before{content:"\f074"}.fa-raspberry-pi:before{content:"\f7bb"}.fa-ravelry:before{content:"\f2d9"}.fa-react:before{content:"\f41b"}.fa-reacteurope:before{content:"\f75d"}.fa-readme:before{content:"\f4d5"}.fa-rebel:before{content:"\f1d0"}.fa-receipt:before{content:"\f543"}.fa-record-vinyl:before{content:"\f8d9"}.fa-recycle:before{content:"\f1b8"}.fa-red-river:before{content:"\f3e3"}.fa-reddit:before{content:"\f1a1"}.fa-reddit-alien:before{content:"\f281"}.fa-reddit-square:before{content:"\f1a2"}.fa-redhat:before{content:"\f7bc"}.fa-redo:before{content:"\f01e"}.fa-redo-alt:before{content:"\f2f9"}.fa-registered:before{content:"\f25d"}.fa-remove-format:before{content:"\f87d"}.fa-renren:before{content:"\f18b"}.fa-reply:before{content:"\f3e5"}.fa-reply-all:before{content:"\f122"}.fa-replyd:before{content:"\f3e6"}.fa-republican:before{content:"\f75e"}.fa-researchgate:before{content:"\f4f8"}.fa-resolving:before{content:"\f3e7"}.fa-restroom:before{content:"\f7bd"}.fa-retweet:before{content:"\f079"}.fa-rev:before{content:"\f5b2"}.fa-ribbon:before{content:"\f4d6"}.fa-ring:before{content:"\f70b"}.fa-road:before{content:"\f018"}.fa-robot:before{content:"\f544"}.fa-rocket:before{content:"\f135"}.fa-rocketchat:before{content:"\f3e8"}.fa-rockrms:before{content:"\f3e9"}.fa-route:before{content:"\f4d7"}.fa-rss:before{content:"\f09e"}.fa-rss-square:before{content:"\f143"}.fa-ruble-sign:before{content:"\f158"}.fa-ruler:before{content:"\f545"}.fa-ruler-combined:before{content:"\f546"}.fa-ruler-horizontal:before{content:"\f547"}.fa-ruler-vertical:before{content:"\f548"}.fa-running:before{content:"\f70c"}.fa-rupee-sign:before{content:"\f156"}.fa-rust:before{content:"\e07a"}.fa-sad-cry:before{content:"\f5b3"}.fa-sad-tear:before{content:"\f5b4"}.fa-safari:before{content:"\f267"}.fa-salesfo
rce:before{content:"\f83b"}.fa-sass:before{content:"\f41e"}.fa-satellite:before{content:"\f7bf"}.fa-satellite-dish:before{content:"\f7c0"}.fa-save:before{content:"\f0c7"}.fa-schlix:before{content:"\f3ea"}.fa-school:before{content:"\f549"}.fa-screwdriver:before{content:"\f54a"}.fa-scribd:before{content:"\f28a"}.fa-scroll:before{content:"\f70e"}.fa-sd-card:before{content:"\f7c2"}.fa-search:before{content:"\f002"}.fa-search-dollar:before{content:"\f688"}.fa-search-location:before{content:"\f689"}.fa-search-minus:before{content:"\f010"}.fa-search-plus:before{content:"\f00e"}.fa-searchengin:before{content:"\f3eb"}.fa-seedling:before{content:"\f4d8"}.fa-sellcast:before{content:"\f2da"}.fa-sellsy:before{content:"\f213"}.fa-server:before{content:"\f233"}.fa-servicestack:before{content:"\f3ec"}.fa-shapes:before{content:"\f61f"}.fa-share:before{content:"\f064"}.fa-share-alt:before{content:"\f1e0"}.fa-share-alt-square:before{content:"\f1e1"}.fa-share-square:before{content:"\f14d"}.fa-shekel-sign:before{content:"\f20b"}.fa-shield-alt:before{content:"\f3ed"}.fa-shield-virus:before{content:"\e06c"}.fa-ship:before{content:"\f21a"}.fa-shipping-fast:before{content:"\f48b"}.fa-shirtsinbulk:before{content:"\f214"}.fa-shoe-prints:before{content:"\f54b"}.fa-shopify:before{content:"\e057"}.fa-shopping-bag:before{content:"\f290"}.fa-shopping-basket:before{content:"\f291"}.fa-shopping-cart:before{content:"\f07a"}.fa-shopware:before{content:"\f5b5"}.fa-shower:before{content:"\f2cc"}.fa-shuttle-van:before{content:"\f5b6"}.fa-sign:before{content:"\f4d9"}.fa-sign-in-alt:before{content:"\f2f6"}.fa-sign-language:before{content:"\f2a7"}.fa-sign-out-alt:before{content:"\f2f5"}.fa-signal:before{content:"\f012"}.fa-signature:before{content:"\f5b7"}.fa-sim-card:before{content:"\f7c4"}.fa-simplybuilt:before{content:"\f215"}.fa-sink:before{content:"\e06d"}.fa-sistrix:before{content:"\f3ee"}.fa-sitemap:before{content:"\f0e8"}.fa-sith:before{content:"\f512"}.fa-skating:before{content:"\f7c5"}.fa-sketch:b
efore{content:"\f7c6"}.fa-skiing:before{content:"\f7c9"}.fa-skiing-nordic:before{content:"\f7ca"}.fa-skull:before{content:"\f54c"}.fa-skull-crossbones:before{content:"\f714"}.fa-skyatlas:before{content:"\f216"}.fa-skype:before{content:"\f17e"}.fa-slack:before{content:"\f198"}.fa-slack-hash:before{content:"\f3ef"}.fa-slash:before{content:"\f715"}.fa-sleigh:before{content:"\f7cc"}.fa-sliders-h:before{content:"\f1de"}.fa-slideshare:before{content:"\f1e7"}.fa-smile:before{content:"\f118"}.fa-smile-beam:before{content:"\f5b8"}.fa-smile-wink:before{content:"\f4da"}.fa-smog:before{content:"\f75f"}.fa-smoking:before{content:"\f48d"}.fa-smoking-ban:before{content:"\f54d"}.fa-sms:before{content:"\f7cd"}.fa-snapchat:before{content:"\f2ab"}.fa-snapchat-ghost:before{content:"\f2ac"}.fa-snapchat-square:before{content:"\f2ad"}.fa-snowboarding:before{content:"\f7ce"}.fa-snowflake:before{content:"\f2dc"}.fa-snowman:before{content:"\f7d0"}.fa-snowplow:before{content:"\f7d2"}.fa-soap:before{content:"\e06e"}.fa-socks:before{content:"\f696"}.fa-solar-panel:before{content:"\f5ba"}.fa-sort:before{content:"\f0dc"}.fa-sort-alpha-down:before{content:"\f15d"}.fa-sort-alpha-down-alt:before{content:"\f881"}.fa-sort-alpha-up:before{content:"\f15e"}.fa-sort-alpha-up-alt:before{content:"\f882"}.fa-sort-amount-down:before{content:"\f160"}.fa-sort-amount-down-alt:before{content:"\f884"}.fa-sort-amount-up:before{content:"\f161"}.fa-sort-amount-up-alt:before{content:"\f885"}.fa-sort-down:before{content:"\f0dd"}.fa-sort-numeric-down:before{content:"\f162"}.fa-sort-numeric-down-alt:before{content:"\f886"}.fa-sort-numeric-up:before{content:"\f163"}.fa-sort-numeric-up-alt:before{content:"\f887"}.fa-sort-up:before{content:"\f0de"}.fa-soundcloud:before{content:"\f1be"}.fa-sourcetree:before{content:"\f7d3"}.fa-spa:before{content:"\f5bb"}.fa-space-shuttle:before{content:"\f197"}.fa-speakap:before{content:"\f3f3"}.fa-speaker-deck:before{content:"\f83c"}.fa-spell-check:before{content:"\f891"}.fa-spider:before{c
ontent:"\f717"}.fa-spinner:before{content:"\f110"}.fa-splotch:before{content:"\f5bc"}.fa-spotify:before{content:"\f1bc"}.fa-spray-can:before{content:"\f5bd"}.fa-square:before{content:"\f0c8"}.fa-square-full:before{content:"\f45c"}.fa-square-root-alt:before{content:"\f698"}.fa-squarespace:before{content:"\f5be"}.fa-stack-exchange:before{content:"\f18d"}.fa-stack-overflow:before{content:"\f16c"}.fa-stackpath:before{content:"\f842"}.fa-stamp:before{content:"\f5bf"}.fa-star:before{content:"\f005"}.fa-star-and-crescent:before{content:"\f699"}.fa-star-half:before{content:"\f089"}.fa-star-half-alt:before{content:"\f5c0"}.fa-star-of-david:before{content:"\f69a"}.fa-star-of-life:before{content:"\f621"}.fa-staylinked:before{content:"\f3f5"}.fa-steam:before{content:"\f1b6"}.fa-steam-square:before{content:"\f1b7"}.fa-steam-symbol:before{content:"\f3f6"}.fa-step-backward:before{content:"\f048"}.fa-step-forward:before{content:"\f051"}.fa-stethoscope:before{content:"\f0f1"}.fa-sticker-mule:before{content:"\f3f7"}.fa-sticky-note:before{content:"\f249"}.fa-stop:before{content:"\f04d"}.fa-stop-circle:before{content:"\f28d"}.fa-stopwatch:before{content:"\f2f2"}.fa-stopwatch-20:before{content:"\e06f"}.fa-store:before{content:"\f54e"}.fa-store-alt:before{content:"\f54f"}.fa-store-alt-slash:before{content:"\e070"}.fa-store-slash:before{content:"\e071"}.fa-strava:before{content:"\f428"}.fa-stream:before{content:"\f550"}.fa-street-view:before{content:"\f21d"}.fa-strikethrough:before{content:"\f0cc"}.fa-stripe:before{content:"\f429"}.fa-stripe-s:before{content:"\f42a"}.fa-stroopwafel:before{content:"\f551"}.fa-studiovinari:before{content:"\f3f8"}.fa-stumbleupon:before{content:"\f1a4"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-subscript:before{content:"\f12c"}.fa-subway:before{content:"\f239"}.fa-suitcase:before{content:"\f0f2"}.fa-suitcase-rolling:before{content:"\f5c1"}.fa-sun:before{content:"\f185"}.fa-superpowers:before{content:"\f2dd"}.fa-superscript:before{content:"\f12b"}.fa-su
pple:before{content:"\f3f9"}.fa-surprise:before{content:"\f5c2"}.fa-suse:before{content:"\f7d6"}.fa-swatchbook:before{content:"\f5c3"}.fa-swift:before{content:"\f8e1"}.fa-swimmer:before{content:"\f5c4"}.fa-swimming-pool:before{content:"\f5c5"}.fa-symfony:before{content:"\f83d"}.fa-synagogue:before{content:"\f69b"}.fa-sync:before{content:"\f021"}.fa-sync-alt:before{content:"\f2f1"}.fa-syringe:before{content:"\f48e"}.fa-table:before{content:"\f0ce"}.fa-table-tennis:before{content:"\f45d"}.fa-tablet:before{content:"\f10a"}.fa-tablet-alt:before{content:"\f3fa"}.fa-tablets:before{content:"\f490"}.fa-tachometer-alt:before{content:"\f3fd"}.fa-tag:before{content:"\f02b"}.fa-tags:before{content:"\f02c"}.fa-tape:before{content:"\f4db"}.fa-tasks:before{content:"\f0ae"}.fa-taxi:before{content:"\f1ba"}.fa-teamspeak:before{content:"\f4f9"}.fa-teeth:before{content:"\f62e"}.fa-teeth-open:before{content:"\f62f"}.fa-telegram:before{content:"\f2c6"}.fa-telegram-plane:before{content:"\f3fe"}.fa-temperature-high:before{content:"\f769"}.fa-temperature-low:before{content:"\f76b"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-tenge:before{content:"\f7d7"}.fa-terminal:before{content:"\f120"}.fa-text-height:before{content:"\f034"}.fa-text-width:before{content:"\f035"}.fa-th:before{content:"\f00a"}.fa-th-large:before{content:"\f009"}.fa-th-list:before{content:"\f00b"}.fa-the-red-yeti:before{content:"\f69d"}.fa-theater-masks:before{content:"\f630"}.fa-themeco:before{content:"\f5c6"}.fa-themeisle:before{content:"\f2b2"}.fa-thermometer:before{content:"\f491"}.fa-thermometer-empty:before{content:"\f2cb"}.fa-thermometer-full:before{content:"\f2c7"}.fa-thermometer-half:before{content:"\f2c9"}.fa-thermometer-quarter:before{content:"\f2ca"}.fa-thermometer-three-quarters:before{content:"\f2c8"}.fa-think-peaks:before{content:"\f731"}.fa-thumbs-down:before{content:"\f165"}.fa-thumbs-up:before{content:"\f164"}.fa-thumbtack:before{content:"\f08d"}.fa-ticket-alt:before{content:"\f3ff"}.fa-tiktok:before{conte
nt:"\e07b"}.fa-times:before{content:"\f00d"}.fa-times-circle:before{content:"\f057"}.fa-tint:before{content:"\f043"}.fa-tint-slash:before{content:"\f5c7"}.fa-tired:before{content:"\f5c8"}.fa-toggle-off:before{content:"\f204"}.fa-toggle-on:before{content:"\f205"}.fa-toilet:before{content:"\f7d8"}.fa-toilet-paper:before{content:"\f71e"}.fa-toilet-paper-slash:before{content:"\e072"}.fa-toolbox:before{content:"\f552"}.fa-tools:before{content:"\f7d9"}.fa-tooth:before{content:"\f5c9"}.fa-torah:before{content:"\f6a0"}.fa-torii-gate:before{content:"\f6a1"}.fa-tractor:before{content:"\f722"}.fa-trade-federation:before{content:"\f513"}.fa-trademark:before{content:"\f25c"}.fa-traffic-light:before{content:"\f637"}.fa-trailer:before{content:"\e041"}.fa-train:before{content:"\f238"}.fa-tram:before{content:"\f7da"}.fa-transgender:before{content:"\f224"}.fa-transgender-alt:before{content:"\f225"}.fa-trash:before{content:"\f1f8"}.fa-trash-alt:before{content:"\f2ed"}.fa-trash-restore:before{content:"\f829"}.fa-trash-restore-alt:before{content:"\f82a"}.fa-tree:before{content:"\f1bb"}.fa-trello:before{content:"\f181"}.fa-tripadvisor:before{content:"\f262"}.fa-trophy:before{content:"\f091"}.fa-truck:before{content:"\f0d1"}.fa-truck-loading:before{content:"\f4de"}.fa-truck-monster:before{content:"\f63b"}.fa-truck-moving:before{content:"\f4df"}.fa-truck-pickup:before{content:"\f63c"}.fa-tshirt:before{content:"\f553"}.fa-tty:before{content:"\f1e4"}.fa-tumblr:before{content:"\f173"}.fa-tumblr-square:before{content:"\f174"}.fa-tv:before{content:"\f26c"}.fa-twitch:before{content:"\f1e8"}.fa-twitter:before{content:"\f099"}.fa-twitter-square:before{content:"\f081"}.fa-typo3:before{content:"\f42b"}.fa-uber:before{content:"\f402"}.fa-ubuntu:before{content:"\f7df"}.fa-uikit:before{content:"\f403"}.fa-umbraco:before{content:"\f8e8"}.fa-umbrella:before{content:"\f0e9"}.fa-umbrella-beach:before{content:"\f5ca"}.fa-uncharted:before{content:"\e084"}.fa-underline:before{content:"\f0cd"}.fa-undo:before{c
ontent:"\f0e2"}.fa-undo-alt:before{content:"\f2ea"}.fa-uniregistry:before{content:"\f404"}.fa-unity:before{content:"\e049"}.fa-universal-access:before{content:"\f29a"}.fa-university:before{content:"\f19c"}.fa-unlink:before{content:"\f127"}.fa-unlock:before{content:"\f09c"}.fa-unlock-alt:before{content:"\f13e"}.fa-unsplash:before{content:"\e07c"}.fa-untappd:before{content:"\f405"}.fa-upload:before{content:"\f093"}.fa-ups:before{content:"\f7e0"}.fa-usb:before{content:"\f287"}.fa-user:before{content:"\f007"}.fa-user-alt:before{content:"\f406"}.fa-user-alt-slash:before{content:"\f4fa"}.fa-user-astronaut:before{content:"\f4fb"}.fa-user-check:before{content:"\f4fc"}.fa-user-circle:before{content:"\f2bd"}.fa-user-clock:before{content:"\f4fd"}.fa-user-cog:before{content:"\f4fe"}.fa-user-edit:before{content:"\f4ff"}.fa-user-friends:before{content:"\f500"}.fa-user-graduate:before{content:"\f501"}.fa-user-injured:before{content:"\f728"}.fa-user-lock:before{content:"\f502"}.fa-user-md:before{content:"\f0f0"}.fa-user-minus:before{content:"\f503"}.fa-user-ninja:before{content:"\f504"}.fa-user-nurse:before{content:"\f82f"}.fa-user-plus:before{content:"\f234"}.fa-user-secret:before{content:"\f21b"}.fa-user-shield:before{content:"\f505"}.fa-user-slash:before{content:"\f506"}.fa-user-tag:before{content:"\f507"}.fa-user-tie:before{content:"\f508"}.fa-user-times:before{content:"\f235"}.fa-users:before{content:"\f0c0"}.fa-users-cog:before{content:"\f509"}.fa-users-slash:before{content:"\e073"}.fa-usps:before{content:"\f7e1"}.fa-ussunnah:before{content:"\f407"}.fa-utensil-spoon:before{content:"\f2e5"}.fa-utensils:before{content:"\f2e7"}.fa-vaadin:before{content:"\f408"}.fa-vector-square:before{content:"\f5cb"}.fa-venus:before{content:"\f221"}.fa-venus-double:before{content:"\f226"}.fa-venus-mars:before{content:"\f228"}.fa-vest:before{content:"\e085"}.fa-vest-patches:before{content:"\e086"}.fa-viacoin:before{content:"\f237"}.fa-viadeo:before{content:"\f2a9"}.fa-viadeo-square:before{conten
t:"\f2aa"}.fa-vial:before{content:"\f492"}.fa-vials:before{content:"\f493"}.fa-viber:before{content:"\f409"}.fa-video:before{content:"\f03d"}.fa-video-slash:before{content:"\f4e2"}.fa-vihara:before{content:"\f6a7"}.fa-vimeo:before{content:"\f40a"}.fa-vimeo-square:before{content:"\f194"}.fa-vimeo-v:before{content:"\f27d"}.fa-vine:before{content:"\f1ca"}.fa-virus:before{content:"\e074"}.fa-virus-slash:before{content:"\e075"}.fa-viruses:before{content:"\e076"}.fa-vk:before{content:"\f189"}.fa-vnv:before{content:"\f40b"}.fa-voicemail:before{content:"\f897"}.fa-volleyball-ball:before{content:"\f45f"}.fa-volume-down:before{content:"\f027"}.fa-volume-mute:before{content:"\f6a9"}.fa-volume-off:before{content:"\f026"}.fa-volume-up:before{content:"\f028"}.fa-vote-yea:before{content:"\f772"}.fa-vr-cardboard:before{content:"\f729"}.fa-vuejs:before{content:"\f41f"}.fa-walking:before{content:"\f554"}.fa-wallet:before{content:"\f555"}.fa-warehouse:before{content:"\f494"}.fa-watchman-monitoring:before{content:"\e087"}.fa-water:before{content:"\f773"}.fa-wave-square:before{content:"\f83e"}.fa-waze:before{content:"\f83f"}.fa-weebly:before{content:"\f5cc"}.fa-weibo:before{content:"\f18a"}.fa-weight:before{content:"\f496"}.fa-weight-hanging:before{content:"\f5cd"}.fa-weixin:before{content:"\f1d7"}.fa-whatsapp:before{content:"\f232"}.fa-whatsapp-square:before{content:"\f40c"}.fa-wheelchair:before{content:"\f193"}.fa-whmcs:before{content:"\f40d"}.fa-wifi:before{content:"\f1eb"}.fa-wikipedia-w:before{content:"\f266"}.fa-wind:before{content:"\f72e"}.fa-window-close:before{content:"\f410"}.fa-window-maximize:before{content:"\f2d0"}.fa-window-minimize:before{content:"\f2d1"}.fa-window-restore:before{content:"\f2d2"}.fa-windows:before{content:"\f17a"}.fa-wine-bottle:before{content:"\f72f"}.fa-wine-glass:before{content:"\f4e3"}.fa-wine-glass-alt:before{content:"\f5ce"}.fa-wix:before{content:"\f5cf"}.fa-wizards-of-the-coast:before{content:"\f730"}.fa-wodu:before{content:"\e088"}.fa-wolf-pack-ba
ttalion:before{content:"\f514"}.fa-won-sign:before{content:"\f159"}.fa-wordpress:before{content:"\f19a"}.fa-wordpress-simple:before{content:"\f411"}.fa-wpbeginner:before{content:"\f297"}.fa-wpexplorer:before{content:"\f2de"}.fa-wpforms:before{content:"\f298"}.fa-wpressr:before{content:"\f3e4"}.fa-wrench:before{content:"\f0ad"}.fa-x-ray:before{content:"\f497"}.fa-xbox:before{content:"\f412"}.fa-xing:before{content:"\f168"}.fa-xing-square:before{content:"\f169"}.fa-y-combinator:before{content:"\f23b"}.fa-yahoo:before{content:"\f19e"}.fa-yammer:before{content:"\f840"}.fa-yandex:before{content:"\f413"}.fa-yandex-international:before{content:"\f414"}.fa-yarn:before{content:"\f7e3"}.fa-yelp:before{content:"\f1e9"}.fa-yen-sign:before{content:"\f157"}.fa-yin-yang:before{content:"\f6ad"}.fa-yoast:before{content:"\f2b1"}.fa-youtube:before{content:"\f167"}.fa-youtube-square:before{content:"\f431"}.fa-zhihu:before{content:"\f63f"}.sr-only{border:0;clip:rect(0,0,0,0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.sr-only-focusable:active,.sr-only-focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}@font-face{font-family:"Font Awesome 5 Brands";font-style:normal;font-weight:400;font-display:block;src:url(../webfonts/fa-brands-400.eot);src:url(../webfonts/fa-brands-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-brands-400.woff2) format("woff2"),url(../webfonts/fa-brands-400.woff) format("woff"),url(../webfonts/fa-brands-400.ttf) format("truetype"),url(../webfonts/fa-brands-400.svg#fontawesome) format("svg")}.fab{font-family:"Font Awesome 5 Brands"}@font-face{font-family:"Font Awesome 5 Free";font-style:normal;font-weight:400;font-display:block;src:url(../webfonts/fa-regular-400.eot);src:url(../webfonts/fa-regular-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-regular-400.woff2) format("woff2"),url(../webfonts/fa-regular-400.woff) format("woff"),url(../webfonts/fa-regular-400.ttf) 
format("truetype"),url(../webfonts/fa-regular-400.svg#fontawesome) format("svg")}.fab,.far{font-weight:400}@font-face{font-family:"Font Awesome 5 Free";font-style:normal;font-weight:900;font-display:block;src:url(../webfonts/fa-solid-900.eot);src:url(../webfonts/fa-solid-900.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-solid-900.woff2) format("woff2"),url(../webfonts/fa-solid-900.woff) format("woff"),url(../webfonts/fa-solid-900.ttf) format("truetype"),url(../webfonts/fa-solid-900.svg#fontawesome) format("svg")}.fa,.far,.fas{font-family:"Font Awesome 5 Free"}.fa,.fas{font-weight:900} \ No newline at end of file diff --git a/docs/static/css/index.css b/docs/static/css/index.css new file mode 100644 index 0000000..21076ef --- /dev/null +++ b/docs/static/css/index.css @@ -0,0 +1,157 @@ +body { + font-family: 'Noto Sans', sans-serif; +} + + +.footer .icon-link { + font-size: 25px; + color: #000; +} + +.link-block a { + margin-top: 5px; + margin-bottom: 5px; +} + +.dnerf { + font-variant: small-caps; +} + + +.teaser .hero-body { + padding-top: 0; + padding-bottom: 3rem; +} + +.teaser { + font-family: 'Google Sans', sans-serif; +} + + +.publication-title { +} + +.publication-banner { + max-height: parent; + +} + +.publication-banner video { + position: relative; + left: auto; + top: auto; + transform: none; + object-fit: fit; +} + +.publication-header .hero-body { +} + +.publication-title { + font-family: 'Google Sans', sans-serif; +} + +.publication-authors { + font-family: 'Google Sans', sans-serif; +} + +.publication-venue { + color: #555; + width: fit-content; + font-weight: bold; +} + +.publication-awards { + color: #ff3860; + width: fit-content; + font-weight: bolder; +} + +.publication-authors { +} + +.publication-authors a { + color: hsl(204, 86%, 53%) !important; +} + +.publication-authors a:hover { + text-decoration: underline; +} + +.author-block { + display: inline-block; +} + +.publication-banner img { +} + +.publication-authors { + /*color: 
#4286f4;*/ +} + +.publication-video { + position: relative; + width: 100%; + height: 0; + padding-bottom: 56.25%; + + overflow: hidden; + border-radius: 10px !important; +} + +.publication-video iframe { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; +} + +.publication-body img { +} + +.results-carousel { + overflow: hidden; +} + +.results-carousel .item { + margin: 5px; + overflow: hidden; + border: 1px solid #bbb; + border-radius: 10px; + padding: 0; + font-size: 0; +} + +.results-carousel video { + margin: 0; +} + + +.interpolation-panel { + background: #f5f5f5; + border-radius: 10px; +} + +.interpolation-panel .interpolation-image { + width: 100%; + border-radius: 5px; +} + +.interpolation-video-column { +} + +.interpolation-panel .slider { + margin: 0 !important; +} + +.interpolation-panel .slider { + margin: 0 !important; +} + +#interpolation-image-wrapper { + width: 100%; +} +#interpolation-image-wrapper img { + border-radius: 5px; +} diff --git a/docs/static/images/case_study.jpg b/docs/static/images/case_study.jpg new file mode 100644 index 0000000..d12ff61 Binary files /dev/null and b/docs/static/images/case_study.jpg differ diff --git a/docs/static/images/case_study2.jpg b/docs/static/images/case_study2.jpg new file mode 100644 index 0000000..1ff7ec6 Binary files /dev/null and b/docs/static/images/case_study2.jpg differ diff --git a/docs/static/images/comparison.jpg b/docs/static/images/comparison.jpg new file mode 100644 index 0000000..326c2e1 Binary files /dev/null and b/docs/static/images/comparison.jpg differ diff --git a/docs/static/images/error_breakdown_v.jpg b/docs/static/images/error_breakdown_v.jpg new file mode 100644 index 0000000..e869054 Binary files /dev/null and b/docs/static/images/error_breakdown_v.jpg differ diff --git a/docs/static/images/icon.jpg b/docs/static/images/icon.jpg new file mode 100644 index 0000000..d00d1ba Binary files /dev/null and b/docs/static/images/icon.jpg differ diff --git 
a/docs/static/images/result_1.jpg b/docs/static/images/result_1.jpg new file mode 100644 index 0000000..f9a6b6f Binary files /dev/null and b/docs/static/images/result_1.jpg differ diff --git a/docs/static/images/result_2.jpg b/docs/static/images/result_2.jpg new file mode 100644 index 0000000..c82f7fb Binary files /dev/null and b/docs/static/images/result_2.jpg differ diff --git a/docs/static/images/result_3.jpg b/docs/static/images/result_3.jpg new file mode 100644 index 0000000..f150048 Binary files /dev/null and b/docs/static/images/result_3.jpg differ diff --git a/docs/static/images/result_4.jpg b/docs/static/images/result_4.jpg new file mode 100644 index 0000000..185deee Binary files /dev/null and b/docs/static/images/result_4.jpg differ diff --git a/docs/static/images/teaser.jpg b/docs/static/images/teaser.jpg new file mode 100644 index 0000000..46da873 Binary files /dev/null and b/docs/static/images/teaser.jpg differ diff --git a/docs/static/js/bulma-carousel.js b/docs/static/js/bulma-carousel.js new file mode 100644 index 0000000..229edba --- /dev/null +++ b/docs/static/js/bulma-carousel.js @@ -0,0 +1,2371 @@ +(function webpackUniversalModuleDefinition(root, factory) { + if(typeof exports === 'object' && typeof module === 'object') + module.exports = factory(); + else if(typeof define === 'function' && define.amd) + define([], factory); + else if(typeof exports === 'object') + exports["bulmaCarousel"] = factory(); + else + root["bulmaCarousel"] = factory(); +})(typeof self !== 'undefined' ? 
self : this, function() { +return /******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // define getter function for harmony exports +/******/ __webpack_require__.d = function(exports, name, getter) { +/******/ if(!__webpack_require__.o(exports, name)) { +/******/ Object.defineProperty(exports, name, { +/******/ configurable: false, +/******/ enumerable: true, +/******/ get: getter +/******/ }); +/******/ } +/******/ }; +/******/ +/******/ // getDefaultExport function for compatibility with non-harmony modules +/******/ __webpack_require__.n = function(module) { +/******/ var getter = module && module.__esModule ? 
+/******/ function getDefault() { return module['default']; } : +/******/ function getModuleExports() { return module; }; +/******/ __webpack_require__.d(getter, 'a', getter); +/******/ return getter; +/******/ }; +/******/ +/******/ // Object.prototype.hasOwnProperty.call +/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(__webpack_require__.s = 5); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* unused harmony export addClasses */ +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "d", function() { return removeClasses; }); +/* unused harmony export show */ +/* unused harmony export hide */ +/* unused harmony export offset */ +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "e", function() { return width; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return height; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "c", function() { return outerHeight; }); +/* unused harmony export outerWidth */ +/* unused harmony export position */ +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return css; }); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__type__ = __webpack_require__(2); + + +var addClasses = function addClasses(element, classes) { + classes = Array.isArray(classes) ? classes : classes.split(' '); + classes.forEach(function (cls) { + element.classList.add(cls); + }); +}; + +var removeClasses = function removeClasses(element, classes) { + classes = Array.isArray(classes) ? 
classes : classes.split(' '); + classes.forEach(function (cls) { + element.classList.remove(cls); + }); +}; + +var show = function show(elements) { + elements = Array.isArray(elements) ? elements : [elements]; + elements.forEach(function (element) { + element.style.display = ''; + }); +}; + +var hide = function hide(elements) { + elements = Array.isArray(elements) ? elements : [elements]; + elements.forEach(function (element) { + element.style.display = 'none'; + }); +}; + +var offset = function offset(element) { + var rect = element.getBoundingClientRect(); + return { + top: rect.top + document.body.scrollTop, + left: rect.left + document.body.scrollLeft + }; +}; + +// returns an element's width +var width = function width(element) { + return element.getBoundingClientRect().width || element.offsetWidth; +}; +// returns an element's height +var height = function height(element) { + return element.getBoundingClientRect().height || element.offsetHeight; +}; + +var outerHeight = function outerHeight(element) { + var withMargin = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; + + var height = element.offsetHeight; + if (withMargin) { + var style = window.getComputedStyle(element); + height += parseInt(style.marginTop) + parseInt(style.marginBottom); + } + return height; +}; + +var outerWidth = function outerWidth(element) { + var withMargin = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : false; + + var width = element.offsetWidth; + if (withMargin) { + var style = window.getComputedStyle(element); + width += parseInt(style.marginLeft) + parseInt(style.marginRight); + } + return width; +}; + +var position = function position(element) { + return { + left: element.offsetLeft, + top: element.offsetTop + }; +}; + +var css = function css(element, obj) { + if (!obj) { + return window.getComputedStyle(element); + } + if (Object(__WEBPACK_IMPORTED_MODULE_0__type__["b" /* isObject */])(obj)) { + var style = ''; + Object.keys(obj).forEach(function (key) { + style += key + ': ' + obj[key] + ';'; + }); + + element.style.cssText += style; + } +}; + +/***/ }), +/* 1 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (immutable) */ __webpack_exports__["a"] = detectSupportsPassive; +function detectSupportsPassive() { + var supportsPassive = false; + + try { + var opts = Object.defineProperty({}, 'passive', { + get: function get() { + supportsPassive = true; + } + }); + + window.addEventListener('testPassive', null, opts); + window.removeEventListener('testPassive', null, opts); + } catch (e) {} + + return supportsPassive; +} + +/***/ }), +/* 2 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return isFunction; }); +/* unused harmony export isNumber */ +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "c", function() { return isString; }); +/* unused harmony export isDate */ +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return isObject; }); +/* unused harmony export isEmptyObject */ +/* unused harmony export isNode */ +/* unused harmony export isVideo */ +/* unused harmony export isHTML5 */ +/* unused harmony export isIFrame */ +/* unused harmony export isYoutube */ +/* unused harmony export 
isVimeo */ +var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; + +var isFunction = function isFunction(unknown) { + return typeof unknown === 'function'; +}; +var isNumber = function isNumber(unknown) { + return typeof unknown === "number"; +}; +var isString = function isString(unknown) { + return typeof unknown === 'string' || !!unknown && (typeof unknown === 'undefined' ? 'undefined' : _typeof(unknown)) === 'object' && Object.prototype.toString.call(unknown) === '[object String]'; +}; +var isDate = function isDate(unknown) { + return (Object.prototype.toString.call(unknown) === '[object Date]' || unknown instanceof Date) && !isNaN(unknown.valueOf()); +}; +var isObject = function isObject(unknown) { + return (typeof unknown === 'function' || (typeof unknown === 'undefined' ? 
'undefined' : _typeof(unknown)) === 'object' && !!unknown) && !Array.isArray(unknown); +}; +var isEmptyObject = function isEmptyObject(unknown) { + for (var name in unknown) { + if (unknown.hasOwnProperty(name)) { + return false; + } + } + return true; +}; + +var isNode = function isNode(unknown) { + return !!(unknown && unknown.nodeType === HTMLElement | SVGElement); +}; +var isVideo = function isVideo(unknown) { + return isYoutube(unknown) || isVimeo(unknown) || isHTML5(unknown); +}; +var isHTML5 = function isHTML5(unknown) { + return isNode(unknown) && unknown.tagName === 'VIDEO'; +}; +var isIFrame = function isIFrame(unknown) { + return isNode(unknown) && unknown.tagName === 'IFRAME'; +}; +var isYoutube = function isYoutube(unknown) { + return isIFrame(unknown) && !!unknown.src.match(/\/\/.*?youtube(-nocookie)?\.[a-z]+\/(watch\?v=[^&\s]+|embed)|youtu\.be\/.*/); +}; +var isVimeo = function isVimeo(unknown) { + return isIFrame(unknown) && !!unknown.src.match(/vimeo\.com\/video\/.*/); +}; + +/***/ }), +/* 3 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new 
TypeError("Cannot call a class as a function"); } } + +var EventEmitter = function () { + function EventEmitter() { + var events = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : []; + + _classCallCheck(this, EventEmitter); + + this.events = new Map(events); + } + + _createClass(EventEmitter, [{ + key: "on", + value: function on(name, cb) { + var _this = this; + + this.events.set(name, [].concat(_toConsumableArray(this.events.has(name) ? this.events.get(name) : []), [cb])); + + return function () { + return _this.events.set(name, _this.events.get(name).filter(function (fn) { + return fn !== cb; + })); + }; + } + }, { + key: "emit", + value: function emit(name) { + for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { + args[_key - 1] = arguments[_key]; + } + + return this.events.has(name) && this.events.get(name).map(function (fn) { + return fn.apply(undefined, args); + }); + } + }]); + + return EventEmitter; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (EventEmitter); + +/***/ }), +/* 4 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Coordinate = function () { + function Coordinate() { + var x = arguments.length > 0 
&& arguments[0] !== undefined ? arguments[0] : 0; + var y = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; + + _classCallCheck(this, Coordinate); + + this._x = x; + this._y = y; + } + + _createClass(Coordinate, [{ + key: 'add', + value: function add(coord) { + return new Coordinate(this._x + coord._x, this._y + coord._y); + } + }, { + key: 'sub', + value: function sub(coord) { + return new Coordinate(this._x - coord._x, this._y - coord._y); + } + }, { + key: 'distance', + value: function distance(coord) { + var deltaX = this._x - coord._x; + var deltaY = this._y - coord._y; + + return Math.sqrt(Math.pow(deltaX, 2) + Math.pow(deltaY, 2)); + } + }, { + key: 'max', + value: function max(coord) { + var x = Math.max(this._x, coord._x); + var y = Math.max(this._y, coord._y); + + return new Coordinate(x, y); + } + }, { + key: 'equals', + value: function equals(coord) { + if (this == coord) { + return true; + } + if (!coord || coord == null) { + return false; + } + return this._x == coord._x && this._y == coord._y; + } + }, { + key: 'inside', + value: function inside(northwest, southeast) { + if (this._x >= northwest._x && this._x <= southeast._x && this._y >= northwest._y && this._y <= southeast._y) { + + return true; + } + return false; + } + }, { + key: 'constrain', + value: function constrain(min, max) { + if (min._x > max._x || min._y > max._y) { + return this; + } + + var x = this._x, + y = this._y; + + if (min._x !== null) { + x = Math.max(x, min._x); + } + if (max._x !== null) { + x = Math.min(x, max._x); + } + if (min._y !== null) { + y = Math.max(y, min._y); + } + if (max._y !== null) { + y = Math.min(y, max._y); + } + + return new Coordinate(x, y); + } + }, { + key: 'reposition', + value: function reposition(element) { + element.style['top'] = this._y + 'px'; + element.style['left'] = this._x + 'px'; + } + }, { + key: 'toString', + value: function toString() { + return '(' + this._x + ',' + this._y + ')'; + } + }, { + key: 'x', + get: 
function get() { + return this._x; + }, + set: function set() { + var value = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; + + this._x = value; + return this; + } + }, { + key: 'y', + get: function get() { + return this._y; + }, + set: function set() { + var value = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; + + this._y = value; + return this; + } + }]); + + return Coordinate; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Coordinate); + +/***/ }), +/* 5 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +Object.defineProperty(__webpack_exports__, "__esModule", { value: true }); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__utils_index__ = __webpack_require__(6); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__utils_css__ = __webpack_require__(0); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__utils_type__ = __webpack_require__(2); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_3__utils_eventEmitter__ = __webpack_require__(3); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_4__components_autoplay__ = __webpack_require__(7); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_5__components_breakpoint__ = __webpack_require__(9); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_6__components_infinite__ = __webpack_require__(10); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_7__components_loop__ = __webpack_require__(11); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_8__components_navigation__ = __webpack_require__(13); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_9__components_pagination__ = __webpack_require__(15); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_10__components_swipe__ = __webpack_require__(18); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_11__components_transitioner__ = __webpack_require__(19); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_12__defaultOptions__ = 
__webpack_require__(22); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_13__templates__ = __webpack_require__(23); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_14__templates_item__ = __webpack_require__(24); +var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; + +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? 
call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + + + + + + + + + + + + + + + +var bulmaCarousel = function (_EventEmitter) { + _inherits(bulmaCarousel, _EventEmitter); + + function bulmaCarousel(selector) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + _classCallCheck(this, bulmaCarousel); + + var _this = _possibleConstructorReturn(this, (bulmaCarousel.__proto__ || Object.getPrototypeOf(bulmaCarousel)).call(this)); + + _this.element = Object(__WEBPACK_IMPORTED_MODULE_2__utils_type__["c" /* isString */])(selector) ? document.querySelector(selector) : selector; + // An invalid selector or non-DOM node has been provided. + if (!_this.element) { + throw new Error('An invalid selector or non-DOM node has been provided.'); + } + _this._clickEvents = ['click', 'touch']; + + // Use Element dataset values to override options + var elementConfig = _this.element.dataset ? 
Object.keys(_this.element.dataset).filter(function (key) { + return Object.keys(__WEBPACK_IMPORTED_MODULE_12__defaultOptions__["a" /* default */]).includes(key); + }).reduce(function (obj, key) { + return _extends({}, obj, _defineProperty({}, key, _this.element.dataset[key])); + }, {}) : {}; + // Set default options - dataset attributes are master + _this.options = _extends({}, __WEBPACK_IMPORTED_MODULE_12__defaultOptions__["a" /* default */], options, elementConfig); + + _this._id = Object(__WEBPACK_IMPORTED_MODULE_0__utils_index__["a" /* uuid */])('slider'); + + _this.onShow = _this.onShow.bind(_this); + + // Initiate plugin + _this._init(); + return _this; + } + + /** + * Initiate all DOM element containing datePicker class + * @method + * @return {Array} Array of all datePicker instances + */ + + + _createClass(bulmaCarousel, [{ + key: '_init', + + + /**************************************************** + * * + * PRIVATE FUNCTIONS * + * * + ****************************************************/ + /** + * Initiate plugin instance + * @method _init + * @return {Slider} Current plugin instance + */ + value: function _init() { + this._items = Array.from(this.element.children); + + // Load plugins + this._breakpoint = new __WEBPACK_IMPORTED_MODULE_5__components_breakpoint__["a" /* default */](this); + this._autoplay = new __WEBPACK_IMPORTED_MODULE_4__components_autoplay__["a" /* default */](this); + this._navigation = new __WEBPACK_IMPORTED_MODULE_8__components_navigation__["a" /* default */](this); + this._pagination = new __WEBPACK_IMPORTED_MODULE_9__components_pagination__["a" /* default */](this); + this._infinite = new __WEBPACK_IMPORTED_MODULE_6__components_infinite__["a" /* default */](this); + this._loop = new __WEBPACK_IMPORTED_MODULE_7__components_loop__["a" /* default */](this); + this._swipe = new __WEBPACK_IMPORTED_MODULE_10__components_swipe__["a" /* default */](this); + + this._build(); + + if (Object(__WEBPACK_IMPORTED_MODULE_2__utils_type__["a" /* 
isFunction */])(this.options.onReady)) { + this.options.onReady(this); + } + + return this; + } + + /** + * Build Slider HTML component and append it to the DOM + * @method _build + */ + + }, { + key: '_build', + value: function _build() { + var _this2 = this; + + // Generate HTML Fragment of template + this.node = document.createRange().createContextualFragment(Object(__WEBPACK_IMPORTED_MODULE_13__templates__["a" /* default */])(this.id)); + // Save pointers to template parts + this._ui = { + wrapper: this.node.firstChild, + container: this.node.querySelector('.slider-container') + + // Add slider to DOM + };this.element.appendChild(this.node); + this._ui.wrapper.classList.add('is-loading'); + this._ui.container.style.opacity = 0; + + this._transitioner = new __WEBPACK_IMPORTED_MODULE_11__components_transitioner__["a" /* default */](this); + + // Wrap all items by slide element + this._slides = this._items.map(function (item, index) { + return _this2._createSlide(item, index); + }); + + this.reset(); + + this._bindEvents(); + + this._ui.container.style.opacity = 1; + this._ui.wrapper.classList.remove('is-loading'); + } + + /** + * Bind all events + * @method _bindEvents + * @return {void} + */ + + }, { + key: '_bindEvents', + value: function _bindEvents() { + this.on('show', this.onShow); + } + }, { + key: '_unbindEvents', + value: function _unbindEvents() { + this.off('show', this.onShow); + } + }, { + key: '_createSlide', + value: function _createSlide(item, index) { + var slide = document.createRange().createContextualFragment(Object(__WEBPACK_IMPORTED_MODULE_14__templates_item__["a" /* default */])()).firstChild; + slide.dataset.sliderIndex = index; + slide.appendChild(item); + return slide; + } + + /** + * Calculate slider dimensions + */ + + }, { + key: '_setDimensions', + value: function _setDimensions() { + var _this3 = this; + + if (!this.options.vertical) { + if (this.options.centerMode) { + this._ui.wrapper.style.padding = '0px ' + 
this.options.centerPadding; + } + } else { + this._ui.wrapper.style.height = Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["c" /* outerHeight */])(this._slides[0]) * this.slidesToShow; + if (this.options.centerMode) { + this._ui.wrapper.style.padding = this.options.centerPadding + ' 0px'; + } + } + + this._wrapperWidth = Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["e" /* width */])(this._ui.wrapper); + this._wrapperHeight = Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["c" /* outerHeight */])(this._ui.wrapper); + + if (!this.options.vertical) { + this._slideWidth = Math.ceil(this._wrapperWidth / this.slidesToShow); + this._containerWidth = Math.ceil(this._slideWidth * this._slides.length); + this._ui.container.style.width = this._containerWidth + 'px'; + } else { + this._slideWidth = Math.ceil(this._wrapperWidth); + this._containerHeight = Math.ceil(Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["c" /* outerHeight */])(this._slides[0]) * this._slides.length); + this._ui.container.style.height = this._containerHeight + 'px'; + } + + this._slides.forEach(function (slide) { + slide.style.width = _this3._slideWidth + 'px'; + }); + } + }, { + key: '_setHeight', + value: function _setHeight() { + if (this.options.effect !== 'translate') { + this._ui.container.style.height = Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["c" /* outerHeight */])(this._slides[this.state.index]) + 'px'; + } + } + + // Update slides classes + + }, { + key: '_setClasses', + value: function _setClasses() { + var _this4 = this; + + this._slides.forEach(function (slide) { + Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["d" /* removeClasses */])(slide, 'is-active is-current is-slide-previous is-slide-next'); + if (Math.abs((_this4.state.index - 1) % _this4.state.length) === parseInt(slide.dataset.sliderIndex, 10)) { + slide.classList.add('is-slide-previous'); + } + if (Math.abs(_this4.state.index % _this4.state.length) === parseInt(slide.dataset.sliderIndex, 10)) { + 
slide.classList.add('is-current'); + } + if (Math.abs((_this4.state.index + 1) % _this4.state.length) === parseInt(slide.dataset.sliderIndex, 10)) { + slide.classList.add('is-slide-next'); + } + }); + } + + /**************************************************** + * * + * GETTERS and SETTERS * + * * + ****************************************************/ + + /** + * Get id of current datePicker + */ + + }, { + key: 'onShow', + + + /**************************************************** + * * + * EVENTS FUNCTIONS * + * * + ****************************************************/ + value: function onShow(e) { + this._navigation.refresh(); + this._pagination.refresh(); + this._setClasses(); + } + + /**************************************************** + * * + * PUBLIC FUNCTIONS * + * * + ****************************************************/ + + }, { + key: 'next', + value: function next() { + if (!this.options.loop && !this.options.infinite && this.state.index + this.slidesToScroll > this.state.length - this.slidesToShow && !this.options.centerMode) { + this.state.next = this.state.index; + } else { + this.state.next = this.state.index + this.slidesToScroll; + } + this.show(); + } + }, { + key: 'previous', + value: function previous() { + if (!this.options.loop && !this.options.infinite && this.state.index === 0) { + this.state.next = this.state.index; + } else { + this.state.next = this.state.index - this.slidesToScroll; + } + this.show(); + } + }, { + key: 'start', + value: function start() { + this._autoplay.start(); + } + }, { + key: 'pause', + value: function pause() { + this._autoplay.pause(); + } + }, { + key: 'stop', + value: function stop() { + this._autoplay.stop(); + } + }, { + key: 'show', + value: function show(index) { + var force = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : false; + + // If all slides are already visible then return + if (!this.state.length || this.state.length <= this.slidesToShow) { + return; + } + + if (typeof index === 'Number') { + this.state.next = index; + } + + if (this.options.loop) { + this._loop.apply(); + } + if (this.options.infinite) { + this._infinite.apply(); + } + + // If new slide is already the current one then return + if (this.state.index === this.state.next) { + return; + } + + this.emit('before:show', this.state); + this._transitioner.apply(force, this._setHeight.bind(this)); + this.emit('after:show', this.state); + + this.emit('show', this); + } + }, { + key: 'reset', + value: function reset() { + var _this5 = this; + + this.state = { + length: this._items.length, + index: Math.abs(this.options.initialSlide), + next: Math.abs(this.options.initialSlide), + prev: undefined + }; + + // Fix options + if (this.options.loop && this.options.infinite) { + this.options.loop = false; + } + if (this.options.slidesToScroll > this.options.slidesToShow) { + this.options.slidesToScroll = this.slidesToShow; + } + this._breakpoint.init(); + + if (this.state.index >= this.state.length && this.state.index !== 0) { + this.state.index = this.state.index - this.slidesToScroll; + } + if (this.state.length <= this.slidesToShow) { + this.state.index = 0; + } + + this._ui.wrapper.appendChild(this._navigation.init().render()); + this._ui.wrapper.appendChild(this._pagination.init().render()); + + if (this.options.navigationSwipe) { + this._swipe.bindEvents(); + } else { + this._swipe._bindEvents(); + } + + this._breakpoint.apply(); + // Move all created slides into slider + this._slides.forEach(function (slide) { + return _this5._ui.container.appendChild(slide); + }); + this._transitioner.init().apply(true, this._setHeight.bind(this)); + + if (this.options.autoplay) { + this._autoplay.init().start(); + } + } + + /** + * Destroy Slider + * @method destroy + */ + + }, { + key: 'destroy', + value: function 
destroy() { + var _this6 = this; + + this._unbindEvents(); + this._items.forEach(function (item) { + _this6.element.appendChild(item); + }); + this.node.remove(); + } + }, { + key: 'id', + get: function get() { + return this._id; + } + }, { + key: 'index', + set: function set(index) { + this._index = index; + }, + get: function get() { + return this._index; + } + }, { + key: 'length', + set: function set(length) { + this._length = length; + }, + get: function get() { + return this._length; + } + }, { + key: 'slides', + get: function get() { + return this._slides; + }, + set: function set(slides) { + this._slides = slides; + } + }, { + key: 'slidesToScroll', + get: function get() { + return this.options.effect === 'translate' ? this._breakpoint.getSlidesToScroll() : 1; + } + }, { + key: 'slidesToShow', + get: function get() { + return this.options.effect === 'translate' ? this._breakpoint.getSlidesToShow() : 1; + } + }, { + key: 'direction', + get: function get() { + return this.element.dir.toLowerCase() === 'rtl' || this.element.style.direction === 'rtl' ? 'rtl' : 'ltr'; + } + }, { + key: 'wrapper', + get: function get() { + return this._ui.wrapper; + } + }, { + key: 'wrapperWidth', + get: function get() { + return this._wrapperWidth || 0; + } + }, { + key: 'container', + get: function get() { + return this._ui.container; + } + }, { + key: 'containerWidth', + get: function get() { + return this._containerWidth || 0; + } + }, { + key: 'slideWidth', + get: function get() { + return this._slideWidth || 0; + } + }, { + key: 'transitioner', + get: function get() { + return this._transitioner; + } + }], [{ + key: 'attach', + value: function attach() { + var _this7 = this; + + var selector = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : '.slider'; + var options = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + var instances = new Array(); + + var elements = Object(__WEBPACK_IMPORTED_MODULE_2__utils_type__["c" /* isString */])(selector) ? document.querySelectorAll(selector) : Array.isArray(selector) ? selector : [selector]; + [].forEach.call(elements, function (element) { + if (typeof element[_this7.constructor.name] === 'undefined') { + var instance = new bulmaCarousel(element, options); + element[_this7.constructor.name] = instance; + instances.push(instance); + } else { + instances.push(element[_this7.constructor.name]); + } + }); + + return instances; + } + }]); + + return bulmaCarousel; +}(__WEBPACK_IMPORTED_MODULE_3__utils_eventEmitter__["a" /* default */]); + +/* harmony default export */ __webpack_exports__["default"] = (bulmaCarousel); + +/***/ }), +/* 6 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return uuid; }); +/* unused harmony export isRtl */ +/* unused harmony export defer */ +/* unused harmony export getNodeIndex */ +/* unused harmony export camelize */ +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + +var uuid = function uuid() { + var prefix = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : ''; + return prefix + ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, function (c) { + return (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16); + }); +}; +var isRtl = function isRtl() { + return document.documentElement.getAttribute('dir') === 'rtl'; +}; + +var defer = function defer() { + this.promise = new Promise(function (resolve, reject) { + this.resolve = resolve; + this.reject = reject; + }.bind(this)); + + this.then = this.promise.then.bind(this.promise); + this.catch = this.promise.catch.bind(this.promise); +}; + +var getNodeIndex = function getNodeIndex(node) { + return [].concat(_toConsumableArray(node.parentNode.children)).indexOf(node); +}; +var camelize = function camelize(str) { + return str.replace(/-(\w)/g, toUpper); +}; + +/***/ }), +/* 7 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__utils_eventEmitter__ = __webpack_require__(3); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__utils_device__ = __webpack_require__(8); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && 
(typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + +var onVisibilityChange = Symbol('onVisibilityChange'); +var onMouseEnter = Symbol('onMouseEnter'); +var onMouseLeave = Symbol('onMouseLeave'); + +var defaultOptions = { + autoplay: false, + autoplaySpeed: 3000 +}; + +var Autoplay = function (_EventEmitter) { + _inherits(Autoplay, _EventEmitter); + + function Autoplay(slider) { + _classCallCheck(this, Autoplay); + + var _this = _possibleConstructorReturn(this, (Autoplay.__proto__ || Object.getPrototypeOf(Autoplay)).call(this)); + + _this.slider = slider; + + _this.onVisibilityChange = _this.onVisibilityChange.bind(_this); + _this.onMouseEnter = _this.onMouseEnter.bind(_this); + _this.onMouseLeave = _this.onMouseLeave.bind(_this); + return _this; + } + + _createClass(Autoplay, [{ + key: 'init', + value: function init() { + this._bindEvents(); + return this; + } + }, { + key: '_bindEvents', + value: function _bindEvents() { + document.addEventListener('visibilitychange', this.onVisibilityChange); + if (this.slider.options.pauseOnHover) { + this.slider.container.addEventListener(__WEBPACK_IMPORTED_MODULE_1__utils_device__["a" /* pointerEnter */], this.onMouseEnter); + this.slider.container.addEventListener(__WEBPACK_IMPORTED_MODULE_1__utils_device__["b" /* pointerLeave */], this.onMouseLeave); + } + } + }, { + key: '_unbindEvents', + value: function _unbindEvents() { + document.removeEventListener('visibilitychange', this.onVisibilityChange); + 
this.slider.container.removeEventListener(__WEBPACK_IMPORTED_MODULE_1__utils_device__["a" /* pointerEnter */], this.onMouseEnter); + this.slider.container.removeEventListener(__WEBPACK_IMPORTED_MODULE_1__utils_device__["b" /* pointerLeave */], this.onMouseLeave); + } + }, { + key: 'start', + value: function start() { + var _this2 = this; + + this.stop(); + if (this.slider.options.autoplay) { + this.emit('start', this); + this._interval = setInterval(function () { + if (!(_this2._hovering && _this2.slider.options.pauseOnHover)) { + if (!_this2.slider.options.centerMode && _this2.slider.state.next >= _this2.slider.state.length - _this2.slider.slidesToShow && !_this2.slider.options.loop && !_this2.slider.options.infinite) { + _this2.stop(); + } else { + _this2.slider.next(); + } + } + }, this.slider.options.autoplaySpeed); + } + } + }, { + key: 'stop', + value: function stop() { + this._interval = clearInterval(this._interval); + this.emit('stop', this); + } + }, { + key: 'pause', + value: function pause() { + var _this3 = this; + + var speed = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : 0; + + if (this.paused) { + return; + } + if (this.timer) { + this.stop(); + } + this.paused = true; + if (speed === 0) { + this.paused = false; + this.start(); + } else { + this.slider.on('transition:end', function () { + if (!_this3) { + return; + } + _this3.paused = false; + if (!_this3.run) { + _this3.stop(); + } else { + _this3.start(); + } + }); + } + } + }, { + key: 'onVisibilityChange', + value: function onVisibilityChange(e) { + if (document.hidden) { + this.stop(); + } else { + this.start(); + } + } + }, { + key: 'onMouseEnter', + value: function onMouseEnter(e) { + this._hovering = true; + if (this.slider.options.pauseOnHover) { + this.pause(); + } + } + }, { + key: 'onMouseLeave', + value: function onMouseLeave(e) { + this._hovering = false; + if (this.slider.options.pauseOnHover) { + this.pause(); + } + } + }]); + + return Autoplay; +}(__WEBPACK_IMPORTED_MODULE_0__utils_eventEmitter__["a" /* default */]); + +/* harmony default export */ __webpack_exports__["a"] = (Autoplay); + +/***/ }), +/* 8 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* unused harmony export isIE */ +/* unused harmony export isIETouch */ +/* unused harmony export isAndroid */ +/* unused harmony export isiPad */ +/* unused harmony export isiPod */ +/* unused harmony export isiPhone */ +/* unused harmony export isSafari */ +/* unused harmony export isUiWebView */ +/* unused harmony export supportsTouchEvents */ +/* unused harmony export supportsPointerEvents */ +/* unused harmony export supportsTouch */ +/* unused harmony export pointerDown */ +/* unused harmony export pointerMove */ +/* unused harmony export pointerUp */ +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return pointerEnter; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return pointerLeave; }); +var isIE = window.navigator.pointerEnabled || 
window.navigator.msPointerEnabled; +var isIETouch = window.navigator.msPointerEnabled && window.navigator.msMaxTouchPoints > 1 || window.navigator.pointerEnabled && window.navigator.maxTouchPoints > 1; +var isAndroid = navigator.userAgent.match(/(Android);?[\s\/]+([\d.]+)?/); +var isiPad = navigator.userAgent.match(/(iPad).*OS\s([\d_]+)/); +var isiPod = navigator.userAgent.match(/(iPod)(.*OS\s([\d_]+))?/); +var isiPhone = !navigator.userAgent.match(/(iPad).*OS\s([\d_]+)/) && navigator.userAgent.match(/(iPhone\sOS)\s([\d_]+)/); +var isSafari = navigator.userAgent.toLowerCase().indexOf('safari') >= 0 && navigator.userAgent.toLowerCase().indexOf('chrome') < 0 && navigator.userAgent.toLowerCase().indexOf('android') < 0; +var isUiWebView = /(iPhone|iPod|iPad).*AppleWebKit(?!.*Safari)/i.test(navigator.userAgent); + +var supportsTouchEvents = !!('ontouchstart' in window); +var supportsPointerEvents = !!('PointerEvent' in window); +var supportsTouch = supportsTouchEvents || window.DocumentTouch && document instanceof DocumentTouch || navigator.maxTouchPoints; // IE >=11 +var pointerDown = !supportsTouch ? 'mousedown' : 'mousedown ' + (supportsTouchEvents ? 'touchstart' : 'pointerdown'); +var pointerMove = !supportsTouch ? 'mousemove' : 'mousemove ' + (supportsTouchEvents ? 'touchmove' : 'pointermove'); +var pointerUp = !supportsTouch ? 'mouseup' : 'mouseup ' + (supportsTouchEvents ? 'touchend' : 'pointerup'); +var pointerEnter = supportsTouch && supportsPointerEvents ? 'pointerenter' : 'mouseenter'; +var pointerLeave = supportsTouch && supportsPointerEvents ? 
'pointerleave' : 'mouseleave'; + +/***/ }), +/* 9 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var onResize = Symbol('onResize'); + +var Breakpoints = function () { + function Breakpoints(slider) { + _classCallCheck(this, Breakpoints); + + this.slider = slider; + this.options = slider.options; + + this[onResize] = this[onResize].bind(this); + + this._bindEvents(); + } + + _createClass(Breakpoints, [{ + key: 'init', + value: function init() { + this._defaultBreakpoint = { + slidesToShow: this.options.slidesToShow, + slidesToScroll: this.options.slidesToScroll + }; + this.options.breakpoints.sort(function (a, b) { + return parseInt(a.changePoint, 10) > parseInt(b.changePoint, 10); + }); + this._currentBreakpoint = this._getActiveBreakpoint(); + + return this; + } + }, { + key: 'destroy', + value: function destroy() { + this._unbindEvents(); + } + }, { + key: '_bindEvents', + value: function _bindEvents() { + window.addEventListener('resize', this[onResize]); + window.addEventListener('orientationchange', this[onResize]); + } + }, { + key: '_unbindEvents', + value: function _unbindEvents() { + window.removeEventListener('resize', this[onResize]); + window.removeEventListener('orientationchange', this[onResize]); 
+ } + }, { + key: '_getActiveBreakpoint', + value: function _getActiveBreakpoint() { + //Get breakpoint for window width + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = this.options.breakpoints[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var point = _step.value; + + if (point.changePoint >= window.innerWidth) { + return point; + } + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator.return) { + _iterator.return(); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } + + return this._defaultBreakpoint; + } + }, { + key: 'getSlidesToShow', + value: function getSlidesToShow() { + return this._currentBreakpoint ? this._currentBreakpoint.slidesToShow : this._defaultBreakpoint.slidesToShow; + } + }, { + key: 'getSlidesToScroll', + value: function getSlidesToScroll() { + return this._currentBreakpoint ? 
this._currentBreakpoint.slidesToScroll : this._defaultBreakpoint.slidesToScroll; + } + }, { + key: 'apply', + value: function apply() { + if (this.slider.state.index >= this.slider.state.length && this.slider.state.index !== 0) { + this.slider.state.index = this.slider.state.index - this._currentBreakpoint.slidesToScroll; + } + if (this.slider.state.length <= this._currentBreakpoint.slidesToShow) { + this.slider.state.index = 0; + } + + if (this.options.loop) { + this.slider._loop.init().apply(); + } + + if (this.options.infinite) { + this.slider._infinite.init().apply(); + } + + this.slider._setDimensions(); + this.slider._transitioner.init().apply(true, this.slider._setHeight.bind(this.slider)); + this.slider._setClasses(); + + this.slider._navigation.refresh(); + this.slider._pagination.refresh(); + } + }, { + key: onResize, + value: function value(e) { + var newBreakPoint = this._getActiveBreakpoint(); + if (newBreakPoint.slidesToShow !== this._currentBreakpoint.slidesToShow) { + this._currentBreakpoint = newBreakPoint; + this.apply(); + } + } + }]); + + return Breakpoints; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Breakpoints); + +/***/ }), +/* 10 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } 
return arr2; } else { return Array.from(arr); } } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Infinite = function () { + function Infinite(slider) { + _classCallCheck(this, Infinite); + + this.slider = slider; + } + + _createClass(Infinite, [{ + key: 'init', + value: function init() { + if (this.slider.options.infinite && this.slider.options.effect === 'translate') { + if (this.slider.options.centerMode) { + this._infiniteCount = Math.ceil(this.slider.slidesToShow + this.slider.slidesToShow / 2); + } else { + this._infiniteCount = this.slider.slidesToShow; + } + + var frontClones = []; + var slideIndex = 0; + for (var i = this.slider.state.length; i > this.slider.state.length - 1 - this._infiniteCount; i -= 1) { + slideIndex = i - 1; + frontClones.unshift(this._cloneSlide(this.slider.slides[slideIndex], slideIndex - this.slider.state.length)); + } + + var backClones = []; + for (var _i = 0; _i < this._infiniteCount + this.slider.state.length; _i += 1) { + backClones.push(this._cloneSlide(this.slider.slides[_i % this.slider.state.length], _i + this.slider.state.length)); + } + + this.slider.slides = [].concat(frontClones, _toConsumableArray(this.slider.slides), backClones); + } + return this; + } + }, { + key: 'apply', + value: function apply() {} + }, { + key: 'onTransitionEnd', + value: function onTransitionEnd(e) { + if (this.slider.options.infinite) { + if (this.slider.state.next >= this.slider.state.length) { + this.slider.state.index = this.slider.state.next = this.slider.state.next - this.slider.state.length; + this.slider.transitioner.apply(true); + } else if (this.slider.state.next < 0) { + this.slider.state.index = this.slider.state.next = this.slider.state.length + this.slider.state.next; + this.slider.transitioner.apply(true); + } + } + } + }, { + key: '_cloneSlide', + value: function _cloneSlide(slide, index) { + var newSlide = 
slide.cloneNode(true); + newSlide.dataset.sliderIndex = index; + newSlide.dataset.cloned = true; + var ids = newSlide.querySelectorAll('[id]') || []; + ids.forEach(function (id) { + id.setAttribute('id', ''); + }); + return newSlide; + } + }]); + + return Infinite; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Infinite); + +/***/ }), +/* 11 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__utils_dom__ = __webpack_require__(12); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + +var Loop = function () { + function Loop(slider) { + _classCallCheck(this, Loop); + + this.slider = slider; + } + + _createClass(Loop, [{ + key: "init", + value: function init() { + return this; + } + }, { + key: "apply", + value: function apply() { + if (this.slider.options.loop) { + if (this.slider.state.next > 0) { + if (this.slider.state.next < this.slider.state.length) { + if (this.slider.state.next > this.slider.state.length - this.slider.slidesToShow && Object(__WEBPACK_IMPORTED_MODULE_0__utils_dom__["a" /* isInViewport */])(this.slider._slides[this.slider.state.length - 1], this.slider.wrapper)) { + this.slider.state.next = 0; + } else { + this.slider.state.next = Math.min(Math.max(this.slider.state.next, 
0), this.slider.state.length - this.slider.slidesToShow); + } + } else { + this.slider.state.next = 0; + } + } else { + if (this.slider.state.next <= 0 - this.slider.slidesToScroll) { + this.slider.state.next = this.slider.state.length - this.slider.slidesToShow; + } else { + this.slider.state.next = 0; + } + } + } + } + }]); + + return Loop; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Loop); + +/***/ }), +/* 12 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return isInViewport; }); +var isInViewport = function isInViewport(element, html) { + var rect = element.getBoundingClientRect(); + html = html || document.documentElement; + return rect.top >= 0 && rect.left >= 0 && rect.bottom <= (window.innerHeight || html.clientHeight) && rect.right <= (window.innerWidth || html.clientWidth); +}; + +/***/ }), +/* 13 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__templates_navigation__ = __webpack_require__(14); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__utils_detect_supportsPassive__ = __webpack_require__(1); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a 
function"); } } + + + + +var Navigation = function () { + function Navigation(slider) { + _classCallCheck(this, Navigation); + + this.slider = slider; + + this._clickEvents = ['click', 'touch']; + this._supportsPassive = Object(__WEBPACK_IMPORTED_MODULE_1__utils_detect_supportsPassive__["a" /* default */])(); + + this.onPreviousClick = this.onPreviousClick.bind(this); + this.onNextClick = this.onNextClick.bind(this); + this.onKeyUp = this.onKeyUp.bind(this); + } + + _createClass(Navigation, [{ + key: 'init', + value: function init() { + this.node = document.createRange().createContextualFragment(Object(__WEBPACK_IMPORTED_MODULE_0__templates_navigation__["a" /* default */])(this.slider.options.icons)); + this._ui = { + previous: this.node.querySelector('.slider-navigation-previous'), + next: this.node.querySelector('.slider-navigation-next') + }; + + this._unbindEvents(); + this._bindEvents(); + + this.refresh(); + + return this; + } + }, { + key: 'destroy', + value: function destroy() { + this._unbindEvents(); + } + }, { + key: '_bindEvents', + value: function _bindEvents() { + var _this = this; + + this.slider.wrapper.addEventListener('keyup', this.onKeyUp); + this._clickEvents.forEach(function (clickEvent) { + _this._ui.previous.addEventListener(clickEvent, _this.onPreviousClick); + _this._ui.next.addEventListener(clickEvent, _this.onNextClick); + }); + } + }, { + key: '_unbindEvents', + value: function _unbindEvents() { + var _this2 = this; + + this.slider.wrapper.removeEventListener('keyup', this.onKeyUp); + this._clickEvents.forEach(function (clickEvent) { + _this2._ui.previous.removeEventListener(clickEvent, _this2.onPreviousClick); + _this2._ui.next.removeEventListener(clickEvent, _this2.onNextClick); + }); + } + }, { + key: 'onNextClick', + value: function onNextClick(e) { + if (!this._supportsPassive) { + e.preventDefault(); + } + + if (this.slider.options.navigation) { + this.slider.next(); + } + } + }, { + key: 'onPreviousClick', + value: function 
onPreviousClick(e) { + if (!this._supportsPassive) { + e.preventDefault(); + } + + if (this.slider.options.navigation) { + this.slider.previous(); + } + } + }, { + key: 'onKeyUp', + value: function onKeyUp(e) { + if (this.slider.options.keyNavigation) { + if (e.key === 'ArrowRight' || e.key === 'Right') { + this.slider.next(); + } else if (e.key === 'ArrowLeft' || e.key === 'Left') { + this.slider.previous(); + } + } + } + }, { + key: 'refresh', + value: function refresh() { + // let centerOffset = Math.floor(this.options.slidesToShow / 2); + if (!this.slider.options.loop && !this.slider.options.infinite) { + if (this.slider.options.navigation && this.slider.state.length > this.slider.slidesToShow) { + this._ui.previous.classList.remove('is-hidden'); + this._ui.next.classList.remove('is-hidden'); + if (this.slider.state.next === 0) { + this._ui.previous.classList.add('is-hidden'); + this._ui.next.classList.remove('is-hidden'); + } else if (this.slider.state.next >= this.slider.state.length - this.slider.slidesToShow && !this.slider.options.centerMode) { + this._ui.previous.classList.remove('is-hidden'); + this._ui.next.classList.add('is-hidden'); + } else if (this.slider.state.next >= this.slider.state.length - 1 && this.slider.options.centerMode) { + this._ui.previous.classList.remove('is-hidden'); + this._ui.next.classList.add('is-hidden'); + } + } else { + this._ui.previous.classList.add('is-hidden'); + this._ui.next.classList.add('is-hidden'); + } + } + } + }, { + key: 'render', + value: function render() { + return this.node; + } + }]); + + return Navigation; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Navigation); + +/***/ }), +/* 14 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony default export */ __webpack_exports__["a"] = (function (icons) { + return "
" + icons.previous + "
\n
" + icons.next + "
"; +}); + +/***/ }), +/* 15 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__templates_pagination__ = __webpack_require__(16); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__templates_pagination_page__ = __webpack_require__(17); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_2__utils_detect_supportsPassive__ = __webpack_require__(1); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + + + +var Pagination = function () { + function Pagination(slider) { + _classCallCheck(this, Pagination); + + this.slider = slider; + + this._clickEvents = ['click', 'touch']; + this._supportsPassive = Object(__WEBPACK_IMPORTED_MODULE_2__utils_detect_supportsPassive__["a" /* default */])(); + + this.onPageClick = this.onPageClick.bind(this); + this.onResize = this.onResize.bind(this); + } + + _createClass(Pagination, [{ + key: 'init', + value: function init() { + this._pages = []; + this.node = document.createRange().createContextualFragment(Object(__WEBPACK_IMPORTED_MODULE_0__templates_pagination__["a" /* default */])()); + this._ui = { + container: this.node.firstChild + }; + + this._count = Math.ceil((this.slider.state.length - this.slider.slidesToShow) / this.slider.slidesToScroll); + + this._draw(); + 
this.refresh(); + + return this; + } + }, { + key: 'destroy', + value: function destroy() { + this._unbindEvents(); + } + }, { + key: '_bindEvents', + value: function _bindEvents() { + var _this = this; + + window.addEventListener('resize', this.onResize); + window.addEventListener('orientationchange', this.onResize); + + this._clickEvents.forEach(function (clickEvent) { + _this._pages.forEach(function (page) { + return page.addEventListener(clickEvent, _this.onPageClick); + }); + }); + } + }, { + key: '_unbindEvents', + value: function _unbindEvents() { + var _this2 = this; + + window.removeEventListener('resize', this.onResize); + window.removeEventListener('orientationchange', this.onResize); + + this._clickEvents.forEach(function (clickEvent) { + _this2._pages.forEach(function (page) { + return page.removeEventListener(clickEvent, _this2.onPageClick); + }); + }); + } + }, { + key: '_draw', + value: function _draw() { + this._ui.container.innerHTML = ''; + if (this.slider.options.pagination && this.slider.state.length > this.slider.slidesToShow) { + for (var i = 0; i <= this._count; i++) { + var newPageNode = document.createRange().createContextualFragment(Object(__WEBPACK_IMPORTED_MODULE_1__templates_pagination_page__["a" /* default */])()).firstChild; + newPageNode.dataset.index = i * this.slider.slidesToScroll; + this._pages.push(newPageNode); + this._ui.container.appendChild(newPageNode); + } + this._bindEvents(); + } + } + }, { + key: 'onPageClick', + value: function onPageClick(e) { + if (!this._supportsPassive) { + e.preventDefault(); + } + + this.slider.state.next = e.currentTarget.dataset.index; + this.slider.show(); + } + }, { + key: 'onResize', + value: function onResize() { + this._draw(); + } + }, { + key: 'refresh', + value: function refresh() { + var _this3 = this; + + var newCount = void 0; + + if (this.slider.options.infinite) { + newCount = Math.ceil(this.slider.state.length - 1 / this.slider.slidesToScroll); + } else { + newCount = 
Math.ceil((this.slider.state.length - this.slider.slidesToShow) / this.slider.slidesToScroll); + } + if (newCount !== this._count) { + this._count = newCount; + this._draw(); + } + + this._pages.forEach(function (page) { + page.classList.remove('is-active'); + if (parseInt(page.dataset.index, 10) === _this3.slider.state.next % _this3.slider.state.length) { + page.classList.add('is-active'); + } + }); + } + }, { + key: 'render', + value: function render() { + return this.node; + } + }]); + + return Pagination; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Pagination); + +/***/ }), +/* 16 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony default export */ __webpack_exports__["a"] = (function () { + return "
"; +}); + +/***/ }), +/* 17 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony default export */ __webpack_exports__["a"] = (function () { + return "
"; +}); + +/***/ }), +/* 18 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__utils_coordinate__ = __webpack_require__(4); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__utils_detect_supportsPassive__ = __webpack_require__(1); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + + +var Swipe = function () { + function Swipe(slider) { + _classCallCheck(this, Swipe); + + this.slider = slider; + + this._supportsPassive = Object(__WEBPACK_IMPORTED_MODULE_1__utils_detect_supportsPassive__["a" /* default */])(); + + this.onStartDrag = this.onStartDrag.bind(this); + this.onMoveDrag = this.onMoveDrag.bind(this); + this.onStopDrag = this.onStopDrag.bind(this); + + this._init(); + } + + _createClass(Swipe, [{ + key: '_init', + value: function _init() {} + }, { + key: 'bindEvents', + value: function bindEvents() { + var _this = this; + + this.slider.container.addEventListener('dragstart', function (e) { + if (!_this._supportsPassive) { + e.preventDefault(); + } + }); + this.slider.container.addEventListener('mousedown', this.onStartDrag); + this.slider.container.addEventListener('touchstart', this.onStartDrag); + + window.addEventListener('mousemove', this.onMoveDrag); + window.addEventListener('touchmove', 
this.onMoveDrag); + + window.addEventListener('mouseup', this.onStopDrag); + window.addEventListener('touchend', this.onStopDrag); + window.addEventListener('touchcancel', this.onStopDrag); + } + }, { + key: 'unbindEvents', + value: function unbindEvents() { + var _this2 = this; + + this.slider.container.removeEventListener('dragstart', function (e) { + if (!_this2._supportsPassive) { + e.preventDefault(); + } + }); + this.slider.container.removeEventListener('mousedown', this.onStartDrag); + this.slider.container.removeEventListener('touchstart', this.onStartDrag); + + window.removeEventListener('mousemove', this.onMoveDrag); + window.removeEventListener('touchmove', this.onMoveDrag); + + window.removeEventListener('mouseup', this.onStopDrag); + window.removeEventListener('mouseup', this.onStopDrag); + window.removeEventListener('touchcancel', this.onStopDrag); + } + + /** + * @param {MouseEvent|TouchEvent} + */ + + }, { + key: 'onStartDrag', + value: function onStartDrag(e) { + if (e.touches) { + if (e.touches.length > 1) { + return; + } else { + e = e.touches[0]; + } + } + + this._origin = new __WEBPACK_IMPORTED_MODULE_0__utils_coordinate__["a" /* default */](e.screenX, e.screenY); + this.width = this.slider.wrapperWidth; + this.slider.transitioner.disable(); + } + + /** + * @param {MouseEvent|TouchEvent} + */ + + }, { + key: 'onMoveDrag', + value: function onMoveDrag(e) { + if (this._origin) { + var point = e.touches ? 
e.touches[0] : e; + this._lastTranslate = new __WEBPACK_IMPORTED_MODULE_0__utils_coordinate__["a" /* default */](point.screenX - this._origin.x, point.screenY - this._origin.y); + if (e.touches) { + if (Math.abs(this._lastTranslate.x) > Math.abs(this._lastTranslate.y)) { + if (!this._supportsPassive) { + e.preventDefault(); + } + e.stopPropagation(); + } + } + } + } + + /** + * @param {MouseEvent|TouchEvent} + */ + + }, { + key: 'onStopDrag', + value: function onStopDrag(e) { + if (this._origin && this._lastTranslate) { + if (Math.abs(this._lastTranslate.x) > 0.2 * this.width) { + if (this._lastTranslate.x < 0) { + this.slider.next(); + } else { + this.slider.previous(); + } + } else { + this.slider.show(true); + } + } + this._origin = null; + this._lastTranslate = null; + } + }]); + + return Swipe; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Swipe); + +/***/ }), +/* 19 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__transitions_fade__ = __webpack_require__(20); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__transitions_translate__ = __webpack_require__(21); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + + +var Transitioner = function () { + function Transitioner(slider) { 
+ _classCallCheck(this, Transitioner); + + this.slider = slider; + this.options = slider.options; + + this._animating = false; + this._animation = undefined; + + this._translate = new __WEBPACK_IMPORTED_MODULE_1__transitions_translate__["a" /* default */](this, slider, slider.options); + this._fade = new __WEBPACK_IMPORTED_MODULE_0__transitions_fade__["a" /* default */](this, slider, slider.options); + } + + _createClass(Transitioner, [{ + key: 'init', + value: function init() { + this._fade.init(); + this._translate.init(); + return this; + } + }, { + key: 'isAnimating', + value: function isAnimating() { + return this._animating; + } + }, { + key: 'enable', + value: function enable() { + this._animation && this._animation.enable(); + } + }, { + key: 'disable', + value: function disable() { + this._animation && this._animation.disable(); + } + }, { + key: 'apply', + value: function apply(force, callback) { + // If we don't force refresh and animation in progress then return + if (this._animating && !force) { + return; + } + + switch (this.options.effect) { + case 'fade': + this._animation = this._fade; + break; + case 'translate': + default: + this._animation = this._translate; + break; + } + + this._animationCallback = callback; + + if (force) { + this._animation && this._animation.disable(); + } else { + this._animation && this._animation.enable(); + this._animating = true; + } + + this._animation && this._animation.apply(); + + if (force) { + this.end(); + } + } + }, { + key: 'end', + value: function end() { + this._animating = false; + this._animation = undefined; + this.slider.state.index = this.slider.state.next; + if (this._animationCallback) { + this._animationCallback(); + } + } + }]); + + return Transitioner; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Transitioner); + +/***/ }), +/* 20 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony import */ var 
__WEBPACK_IMPORTED_MODULE_0__utils_css__ = __webpack_require__(0); +var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; + +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + +var Fade = function () { + function Fade(transitioner, slider) { + var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; + + _classCallCheck(this, Fade); + + this.transitioner = transitioner; + this.slider = slider; + this.options = _extends({}, options); + } + + _createClass(Fade, [{ + key: 'init', + value: function init() { + var _this = this; + + if (this.options.effect === 'fade') { + this.slider.slides.forEach(function (slide, index) { + Object(__WEBPACK_IMPORTED_MODULE_0__utils_css__["a" /* css */])(slide, { + position: 'absolute', + left: 0, + top: 0, + bottom: 0, + 'z-index': slide.dataset.sliderIndex == _this.slider.state.index ? 0 : -2, + opacity: slide.dataset.sliderIndex == _this.slider.state.index ? 
1 : 0 + }); + }); + } + return this; + } + }, { + key: 'enable', + value: function enable() { + var _this2 = this; + + this._oldSlide = this.slider.slides.filter(function (slide) { + return slide.dataset.sliderIndex == _this2.slider.state.index; + })[0]; + this._newSlide = this.slider.slides.filter(function (slide) { + return slide.dataset.sliderIndex == _this2.slider.state.next; + })[0]; + if (this._newSlide) { + this._newSlide.addEventListener('transitionend', this.onTransitionEnd.bind(this)); + this._newSlide.style.transition = this.options.duration + 'ms ' + this.options.timing; + if (this._oldSlide) { + this._oldSlide.addEventListener('transitionend', this.onTransitionEnd.bind(this)); + this._oldSlide.style.transition = this.options.duration + 'ms ' + this.options.timing; + } + } + } + }, { + key: 'disable', + value: function disable() { + var _this3 = this; + + this._oldSlide = this.slider.slides.filter(function (slide) { + return slide.dataset.sliderIndex == _this3.slider.state.index; + })[0]; + this._newSlide = this.slider.slides.filter(function (slide) { + return slide.dataset.sliderIndex == _this3.slider.state.next; + })[0]; + if (this._newSlide) { + this._newSlide.removeEventListener('transitionend', this.onTransitionEnd.bind(this)); + this._newSlide.style.transition = 'none'; + if (this._oldSlide) { + this._oldSlide.removeEventListener('transitionend', this.onTransitionEnd.bind(this)); + this._oldSlide.style.transition = 'none'; + } + } + } + }, { + key: 'apply', + value: function apply(force) { + var _this4 = this; + + this._oldSlide = this.slider.slides.filter(function (slide) { + return slide.dataset.sliderIndex == _this4.slider.state.index; + })[0]; + this._newSlide = this.slider.slides.filter(function (slide) { + return slide.dataset.sliderIndex == _this4.slider.state.next; + })[0]; + + if (this._oldSlide && this._newSlide) { + Object(__WEBPACK_IMPORTED_MODULE_0__utils_css__["a" /* css */])(this._oldSlide, { + opacity: 0 + }); + 
Object(__WEBPACK_IMPORTED_MODULE_0__utils_css__["a" /* css */])(this._newSlide, { + opacity: 1, + 'z-index': force ? 0 : -1 + }); + } + } + }, { + key: 'onTransitionEnd', + value: function onTransitionEnd(e) { + if (this.options.effect === 'fade') { + if (this.transitioner.isAnimating() && e.target == this._newSlide) { + if (this._newSlide) { + Object(__WEBPACK_IMPORTED_MODULE_0__utils_css__["a" /* css */])(this._newSlide, { + 'z-index': 0 + }); + this._newSlide.removeEventListener('transitionend', this.onTransitionEnd.bind(this)); + } + if (this._oldSlide) { + Object(__WEBPACK_IMPORTED_MODULE_0__utils_css__["a" /* css */])(this._oldSlide, { + 'z-index': -2 + }); + this._oldSlide.removeEventListener('transitionend', this.onTransitionEnd.bind(this)); + } + } + this.transitioner.end(); + } + } + }]); + + return Fade; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Fade); + +/***/ }), +/* 21 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__utils_coordinate__ = __webpack_require__(4); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_1__utils_css__ = __webpack_require__(0); +var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; + +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, 
staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + + +var Translate = function () { + function Translate(transitioner, slider) { + var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; + + _classCallCheck(this, Translate); + + this.transitioner = transitioner; + this.slider = slider; + this.options = _extends({}, options); + + this.onTransitionEnd = this.onTransitionEnd.bind(this); + } + + _createClass(Translate, [{ + key: 'init', + value: function init() { + this._position = new __WEBPACK_IMPORTED_MODULE_0__utils_coordinate__["a" /* default */](this.slider.container.offsetLeft, this.slider.container.offsetTop); + this._bindEvents(); + return this; + } + }, { + key: 'destroy', + value: function destroy() { + this._unbindEvents(); + } + }, { + key: '_bindEvents', + value: function _bindEvents() { + this.slider.container.addEventListener('transitionend', this.onTransitionEnd); + } + }, { + key: '_unbindEvents', + value: function _unbindEvents() { + this.slider.container.removeEventListener('transitionend', this.onTransitionEnd); + } + }, { + key: 'enable', + value: function enable() { + this.slider.container.style.transition = this.options.duration + 'ms ' + this.options.timing; + } + }, { + key: 'disable', + value: function disable() { + this.slider.container.style.transition = 'none'; + } + }, { + key: 'apply', + value: function apply() { + var _this = this; + + var maxOffset = void 0; + if (this.options.effect === 'translate') { + var slide = this.slider.slides.filter(function (slide) { + return slide.dataset.sliderIndex == _this.slider.state.next; + })[0]; + var slideOffset = new __WEBPACK_IMPORTED_MODULE_0__utils_coordinate__["a" /* default */](slide.offsetLeft, slide.offsetTop); + if (this.options.centerMode) { + maxOffset = new 
__WEBPACK_IMPORTED_MODULE_0__utils_coordinate__["a" /* default */](Math.round(Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["e" /* width */])(this.slider.container)), Math.round(Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["b" /* height */])(this.slider.container))); + } else { + maxOffset = new __WEBPACK_IMPORTED_MODULE_0__utils_coordinate__["a" /* default */](Math.round(Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["e" /* width */])(this.slider.container) - Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["e" /* width */])(this.slider.wrapper)), Math.round(Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["b" /* height */])(this.slider.container) - Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["b" /* height */])(this.slider.wrapper))); + } + var nextOffset = new __WEBPACK_IMPORTED_MODULE_0__utils_coordinate__["a" /* default */](Math.min(Math.max(slideOffset.x * -1, maxOffset.x * -1), 0), Math.min(Math.max(slideOffset.y * -1, maxOffset.y * -1), 0)); + if (this.options.loop) { + if (!this.options.vertical && Math.abs(this._position.x) > maxOffset.x) { + nextOffset.x = 0; + this.slider.state.next = 0; + } else if (this.options.vertical && Math.abs(this._position.y) > maxOffset.y) { + nextOffset.y = 0; + this.slider.state.next = 0; + } + } + + this._position.x = nextOffset.x; + this._position.y = nextOffset.y; + if (this.options.centerMode) { + this._position.x = this._position.x + this.slider.wrapperWidth / 2 - Object(__WEBPACK_IMPORTED_MODULE_1__utils_css__["e" /* width */])(slide) / 2; + } + + if (this.slider.direction === 'rtl') { + this._position.x = -this._position.x; + this._position.y = -this._position.y; + } + this.slider.container.style.transform = 'translate3d(' + this._position.x + 'px, ' + this._position.y + 'px, 0)'; + + /** + * update the index with the nextIndex only if + * the offset of the nextIndex is in the range of the maxOffset + */ + if (slideOffset.x > maxOffset.x) { + this.slider.transitioner.end(); + } + } + } + }, { + key: 
'onTransitionEnd', + value: function onTransitionEnd(e) { + if (this.options.effect === 'translate') { + + if (this.transitioner.isAnimating() && e.target == this.slider.container) { + if (this.options.infinite) { + this.slider._infinite.onTransitionEnd(e); + } + } + this.transitioner.end(); + } + } + }]); + + return Translate; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (Translate); + +/***/ }), +/* 22 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +var defaultOptions = { + initialSlide: 0, + slidesToScroll: 1, + slidesToShow: 1, + + navigation: true, + navigationKeys: true, + navigationSwipe: true, + + pagination: true, + + loop: false, + infinite: false, + + effect: 'translate', + duration: 300, + timing: 'ease', + + autoplay: false, + autoplaySpeed: 3000, + pauseOnHover: true, + breakpoints: [{ + changePoint: 480, + slidesToShow: 1, + slidesToScroll: 1 + }, { + changePoint: 640, + slidesToShow: 2, + slidesToScroll: 2 + }, { + changePoint: 768, + slidesToShow: 3, + slidesToScroll: 3 + }], + + onReady: null, + icons: { + 'previous': '\n \n ', + 'next': '\n \n ' + } +}; + +/* harmony default export */ __webpack_exports__["a"] = (defaultOptions); + +/***/ }), +/* 23 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony default export */ __webpack_exports__["a"] = (function (id) { + return "
\n
\n
"; +}); + +/***/ }), +/* 24 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +/* harmony default export */ __webpack_exports__["a"] = (function () { + return "
"; +}); + +/***/ }) +/******/ ])["default"]; +}); \ No newline at end of file diff --git a/docs/static/js/bulma-carousel.min.js b/docs/static/js/bulma-carousel.min.js new file mode 100644 index 0000000..5fff069 --- /dev/null +++ b/docs/static/js/bulma-carousel.min.js @@ -0,0 +1 @@ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.bulmaCarousel=e():t.bulmaCarousel=e()}("undefined"!=typeof self?self:this,function(){return function(i){var n={};function s(t){if(n[t])return n[t].exports;var e=n[t]={i:t,l:!1,exports:{}};return i[t].call(e.exports,e,e.exports,s),e.l=!0,e.exports}return s.m=i,s.c=n,s.d=function(t,e,i){s.o(t,e)||Object.defineProperty(t,e,{configurable:!1,enumerable:!0,get:i})},s.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return s.d(e,"a",e),e},s.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},s.p="",s(s.s=5)}([function(t,e,i){"use strict";i.d(e,"d",function(){return s}),i.d(e,"e",function(){return r}),i.d(e,"b",function(){return o}),i.d(e,"c",function(){return a}),i.d(e,"a",function(){return l});var n=i(2),s=function(e,t){(t=Array.isArray(t)?t:t.split(" ")).forEach(function(t){e.classList.remove(t)})},r=function(t){return t.getBoundingClientRect().width||t.offsetWidth},o=function(t){return t.getBoundingClientRect().height||t.offsetHeight},a=function(t){var e=1=t._x&&this._x<=e._x&&this._y>=t._y&&this._y<=e._y}},{key:"constrain",value:function(t,e){if(t._x>e._x||t._y>e._y)return this;var i=this._x,n=this._y;return null!==t._x&&(i=Math.max(i,t._x)),null!==e._x&&(i=Math.min(i,e._x)),null!==t._y&&(n=Math.max(n,t._y)),null!==e._y&&(n=Math.min(n,e._y)),new s(i,n)}},{key:"reposition",value:function(t){t.style.top=this._y+"px",t.style.left=this._x+"px"}},{key:"toString",value:function(){return"("+this._x+","+this._y+")"}},{key:"x",get:function(){return this._x},set:function(){var 
t=0this.state.length-this.slidesToShow&&!this.options.centerMode?this.state.next=this.state.index:this.state.next=this.state.index+this.slidesToScroll,this.show()}},{key:"previous",value:function(){this.options.loop||this.options.infinite||0!==this.state.index?this.state.next=this.state.index-this.slidesToScroll:this.state.next=this.state.index,this.show()}},{key:"start",value:function(){this._autoplay.start()}},{key:"pause",value:function(){this._autoplay.pause()}},{key:"stop",value:function(){this._autoplay.stop()}},{key:"show",value:function(t){var e=1this.options.slidesToShow&&(this.options.slidesToScroll=this.slidesToShow),this._breakpoint.init(),this.state.index>=this.state.length&&0!==this.state.index&&(this.state.index=this.state.index-this.slidesToScroll),this.state.length<=this.slidesToShow&&(this.state.index=0),this._ui.wrapper.appendChild(this._navigation.init().render()),this._ui.wrapper.appendChild(this._pagination.init().render()),this.options.navigationSwipe?this._swipe.bindEvents():this._swipe._bindEvents(),this._breakpoint.apply(),this._slides.forEach(function(t){return e._ui.container.appendChild(t)}),this._transitioner.init().apply(!0,this._setHeight.bind(this)),this.options.autoplay&&this._autoplay.init().start()}},{key:"destroy",value:function(){var e=this;this._unbindEvents(),this._items.forEach(function(t){e.element.appendChild(t)}),this.node.remove()}},{key:"id",get:function(){return this._id}},{key:"index",set:function(t){this._index=t},get:function(){return this._index}},{key:"length",set:function(t){this._length=t},get:function(){return this._length}},{key:"slides",get:function(){return 
this._slides},set:function(t){this._slides=t}},{key:"slidesToScroll",get:function(){return"translate"===this.options.effect?this._breakpoint.getSlidesToScroll():1}},{key:"slidesToShow",get:function(){return"translate"===this.options.effect?this._breakpoint.getSlidesToShow():1}},{key:"direction",get:function(){return"rtl"===this.element.dir.toLowerCase()||"rtl"===this.element.style.direction?"rtl":"ltr"}},{key:"wrapper",get:function(){return this._ui.wrapper}},{key:"wrapperWidth",get:function(){return this._wrapperWidth||0}},{key:"container",get:function(){return this._ui.container}},{key:"containerWidth",get:function(){return this._containerWidth||0}},{key:"slideWidth",get:function(){return this._slideWidth||0}},{key:"transitioner",get:function(){return this._transitioner}}],[{key:"attach",value:function(){var i=this,t=0>t/4).toString(16)})}},function(t,e,i){"use strict";var n=i(3),s=i(8),r=function(){function n(t,e){for(var i=0;i=t.slider.state.length-t.slider.slidesToShow&&!t.slider.options.loop&&!t.slider.options.infinite?t.stop():t.slider.next())},this.slider.options.autoplaySpeed))}},{key:"stop",value:function(){this._interval=clearInterval(this._interval),this.emit("stop",this)}},{key:"pause",value:function(){var t=this,e=0parseInt(e.changePoint,10)}),this._currentBreakpoint=this._getActiveBreakpoint(),this}},{key:"destroy",value:function(){this._unbindEvents()}},{key:"_bindEvents",value:function(){window.addEventListener("resize",this[s]),window.addEventListener("orientationchange",this[s])}},{key:"_unbindEvents",value:function(){window.removeEventListener("resize",this[s]),window.removeEventListener("orientationchange",this[s])}},{key:"_getActiveBreakpoint",value:function(){var t=!0,e=!1,i=void 0;try{for(var n,s=this.options.breakpoints[Symbol.iterator]();!(t=(n=s.next()).done);t=!0){var r=n.value;if(r.changePoint>=window.innerWidth)return r}}catch(t){e=!0,i=t}finally{try{!t&&s.return&&s.return()}finally{if(e)throw i}}return 
this._defaultBreakpoint}},{key:"getSlidesToShow",value:function(){return this._currentBreakpoint?this._currentBreakpoint.slidesToShow:this._defaultBreakpoint.slidesToShow}},{key:"getSlidesToScroll",value:function(){return this._currentBreakpoint?this._currentBreakpoint.slidesToScroll:this._defaultBreakpoint.slidesToScroll}},{key:"apply",value:function(){this.slider.state.index>=this.slider.state.length&&0!==this.slider.state.index&&(this.slider.state.index=this.slider.state.index-this._currentBreakpoint.slidesToScroll),this.slider.state.length<=this._currentBreakpoint.slidesToShow&&(this.slider.state.index=0),this.options.loop&&this.slider._loop.init().apply(),this.options.infinite&&this.slider._infinite.init().apply(),this.slider._setDimensions(),this.slider._transitioner.init().apply(!0,this.slider._setHeight.bind(this.slider)),this.slider._setClasses(),this.slider._navigation.refresh(),this.slider._pagination.refresh()}},{key:s,value:function(t){var e=this._getActiveBreakpoint();e.slidesToShow!==this._currentBreakpoint.slidesToShow&&(this._currentBreakpoint=e,this.apply())}}]),e}();e.a=r},function(t,e,i){"use strict";var n=function(){function n(t,e){for(var i=0;ithis.slider.state.length-1-this._infiniteCount;i-=1)e=i-1,t.unshift(this._cloneSlide(this.slider.slides[e],e-this.slider.state.length));for(var n=[],s=0;s=this.slider.state.length?(this.slider.state.index=this.slider.state.next=this.slider.state.next-this.slider.state.length,this.slider.transitioner.apply(!0)):this.slider.state.next<0&&(this.slider.state.index=this.slider.state.next=this.slider.state.length+this.slider.state.next,this.slider.transitioner.apply(!0)))}},{key:"_cloneSlide",value:function(t,e){var i=t.cloneNode(!0);return i.dataset.sliderIndex=e,i.dataset.cloned=!0,(i.querySelectorAll("[id]")||[]).forEach(function(t){t.setAttribute("id","")}),i}}]),e}();e.a=s},function(t,e,i){"use strict";var n=i(12),s=function(){function n(t,e){for(var 
i=0;ithis.slider.state.length-this.slider.slidesToShow&&Object(n.a)(this.slider._slides[this.slider.state.length-1],this.slider.wrapper)?this.slider.state.next=0:this.slider.state.next=Math.min(Math.max(this.slider.state.next,0),this.slider.state.length-this.slider.slidesToShow):this.slider.state.next=0:this.slider.state.next<=0-this.slider.slidesToScroll?this.slider.state.next=this.slider.state.length-this.slider.slidesToShow:this.slider.state.next=0)}}]),e}();e.a=r},function(t,e,i){"use strict";i.d(e,"a",function(){return n});var n=function(t,e){var i=t.getBoundingClientRect();return e=e||document.documentElement,0<=i.top&&0<=i.left&&i.bottom<=(window.innerHeight||e.clientHeight)&&i.right<=(window.innerWidth||e.clientWidth)}},function(t,e,i){"use strict";var n=i(14),s=i(1),r=function(){function n(t,e){for(var i=0;ithis.slider.slidesToShow?(this._ui.previous.classList.remove("is-hidden"),this._ui.next.classList.remove("is-hidden"),0===this.slider.state.next?(this._ui.previous.classList.add("is-hidden"),this._ui.next.classList.remove("is-hidden")):this.slider.state.next>=this.slider.state.length-this.slider.slidesToShow&&!this.slider.options.centerMode?(this._ui.previous.classList.remove("is-hidden"),this._ui.next.classList.add("is-hidden")):this.slider.state.next>=this.slider.state.length-1&&this.slider.options.centerMode&&(this._ui.previous.classList.remove("is-hidden"),this._ui.next.classList.add("is-hidden"))):(this._ui.previous.classList.add("is-hidden"),this._ui.next.classList.add("is-hidden")))}},{key:"render",value:function(){return this.node}}]),e}();e.a=o},function(t,e,i){"use strict";e.a=function(t){return'
'+t.previous+'
\n
'+t.next+"
"}},function(t,e,i){"use strict";var n=i(16),s=i(17),r=i(1),o=function(){function n(t,e){for(var i=0;ithis.slider.slidesToShow){for(var t=0;t<=this._count;t++){var e=document.createRange().createContextualFragment(Object(s.a)()).firstChild;e.dataset.index=t*this.slider.slidesToScroll,this._pages.push(e),this._ui.container.appendChild(e)}this._bindEvents()}}},{key:"onPageClick",value:function(t){this._supportsPassive||t.preventDefault(),this.slider.state.next=t.currentTarget.dataset.index,this.slider.show()}},{key:"onResize",value:function(){this._draw()}},{key:"refresh",value:function(){var e=this,t=void 0;(t=this.slider.options.infinite?Math.ceil(this.slider.state.length-1/this.slider.slidesToScroll):Math.ceil((this.slider.state.length-this.slider.slidesToShow)/this.slider.slidesToScroll))!==this._count&&(this._count=t,this._draw()),this._pages.forEach(function(t){t.classList.remove("is-active"),parseInt(t.dataset.index,10)===e.slider.state.next%e.slider.state.length&&t.classList.add("is-active")})}},{key:"render",value:function(){return this.node}}]),e}();e.a=a},function(t,e,i){"use strict";e.a=function(){return'
'}},function(t,e,i){"use strict";e.a=function(){return'
'}},function(t,e,i){"use strict";var n=i(4),s=i(1),r=function(){function n(t,e){for(var i=0;iMath.abs(this._lastTranslate.y)&&(this._supportsPassive||t.preventDefault(),t.stopPropagation())}}},{key:"onStopDrag",value:function(t){this._origin&&this._lastTranslate&&(Math.abs(this._lastTranslate.x)>.2*this.width?this._lastTranslate.x<0?this.slider.next():this.slider.previous():this.slider.show(!0)),this._origin=null,this._lastTranslate=null}}]),e}();e.a=o},function(t,e,i){"use strict";var n=i(20),s=i(21),r=function(){function n(t,e){for(var i=0;it.x?(s.x=0,this.slider.state.next=0):this.options.vertical&&Math.abs(this._position.y)>t.y&&(s.y=0,this.slider.state.next=0)),this._position.x=s.x,this._position.y=s.y,this.options.centerMode&&(this._position.x=this._position.x+this.slider.wrapperWidth/2-Object(o.e)(i)/2),"rtl"===this.slider.direction&&(this._position.x=-this._position.x,this._position.y=-this._position.y),this.slider.container.style.transform="translate3d("+this._position.x+"px, "+this._position.y+"px, 0)",n.x>t.x&&this.slider.transitioner.end()}}},{key:"onTransitionEnd",value:function(t){"translate"===this.options.effect&&(this.transitioner.isAnimating()&&t.target==this.slider.container&&this.options.infinite&&this.slider._infinite.onTransitionEnd(t),this.transitioner.end())}}]),n}();e.a=n},function(t,e,i){"use strict";e.a={initialSlide:0,slidesToScroll:1,slidesToShow:1,navigation:!0,navigationKeys:!0,navigationSwipe:!0,pagination:!0,loop:!1,infinite:!1,effect:"translate",duration:300,timing:"ease",autoplay:!1,autoplaySpeed:3e3,pauseOnHover:!0,breakpoints:[{changePoint:480,slidesToShow:1,slidesToScroll:1},{changePoint:640,slidesToShow:2,slidesToScroll:2},{changePoint:768,slidesToShow:3,slidesToScroll:3}],onReady:null,icons:{previous:'\n \n ',next:'\n \n '}}},function(t,e,i){"use strict";e.a=function(t){return'
\n
\n
'}},function(t,e,i){"use strict";e.a=function(){return'
'}}]).default}); \ No newline at end of file diff --git a/docs/static/js/bulma-slider.js b/docs/static/js/bulma-slider.js new file mode 100644 index 0000000..c6718de --- /dev/null +++ b/docs/static/js/bulma-slider.js @@ -0,0 +1,461 @@ +(function webpackUniversalModuleDefinition(root, factory) { + if(typeof exports === 'object' && typeof module === 'object') + module.exports = factory(); + else if(typeof define === 'function' && define.amd) + define([], factory); + else if(typeof exports === 'object') + exports["bulmaSlider"] = factory(); + else + root["bulmaSlider"] = factory(); +})(typeof self !== 'undefined' ? self : this, function() { +return /******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // define getter function for harmony exports +/******/ __webpack_require__.d = function(exports, name, getter) { +/******/ if(!__webpack_require__.o(exports, name)) { +/******/ Object.defineProperty(exports, name, 
{ +/******/ configurable: false, +/******/ enumerable: true, +/******/ get: getter +/******/ }); +/******/ } +/******/ }; +/******/ +/******/ // getDefaultExport function for compatibility with non-harmony modules +/******/ __webpack_require__.n = function(module) { +/******/ var getter = module && module.__esModule ? +/******/ function getDefault() { return module['default']; } : +/******/ function getModuleExports() { return module; }; +/******/ __webpack_require__.d(getter, 'a', getter); +/******/ return getter; +/******/ }; +/******/ +/******/ // Object.prototype.hasOwnProperty.call +/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(__webpack_require__.s = 0); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +Object.defineProperty(__webpack_exports__, "__esModule", { value: true }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isString", function() { return isString; }); +/* harmony import */ var __WEBPACK_IMPORTED_MODULE_0__events__ = __webpack_require__(1); +var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; + +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, 
descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + +var isString = function isString(unknown) { + return typeof unknown === 'string' || !!unknown && (typeof unknown === 'undefined' ? 'undefined' : _typeof(unknown)) === 'object' && Object.prototype.toString.call(unknown) === '[object String]'; +}; + +var bulmaSlider = function (_EventEmitter) { + _inherits(bulmaSlider, _EventEmitter); + + function bulmaSlider(selector) { + var options = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + _classCallCheck(this, bulmaSlider); + + var _this = _possibleConstructorReturn(this, (bulmaSlider.__proto__ || Object.getPrototypeOf(bulmaSlider)).call(this)); + + _this.element = typeof selector === 'string' ? document.querySelector(selector) : selector; + // An invalid selector or non-DOM node has been provided. + if (!_this.element) { + throw new Error('An invalid selector or non-DOM node has been provided.'); + } + + _this._clickEvents = ['click']; + /// Set default options and merge with instance defined + _this.options = _extends({}, options); + + _this.onSliderInput = _this.onSliderInput.bind(_this); + + _this.init(); + return _this; + } + + /** + * Initiate all DOM element containing selector + * @method + * @return {Array} Array of all slider instances + */ + + + _createClass(bulmaSlider, [{ + key: 'init', + + + /** + * Initiate plugin + * @method init + * @return {void} + */ + value: function init() { + this._id = 'bulmaSlider' + new Date().getTime() + Math.floor(Math.random() * Math.floor(9999)); + this.output = this._findOutputForSlider(); + + this._bindEvents(); + + if (this.output) { + if (this.element.classList.contains('has-output-tooltip')) { + // Get new output position + var newPosition = this._getSliderOutputPosition(); + + // Set output position + this.output.style['left'] = newPosition.position; + } + } + + this.emit('bulmaslider:ready', this.element.value); + } + }, { + key: '_findOutputForSlider', + value: function _findOutputForSlider() { + var _this2 = this; + + var result = null; + var outputs = document.getElementsByTagName('output') || []; + + Array.from(outputs).forEach(function (output) { + if (output.htmlFor == _this2.element.getAttribute('id')) { + result = output; + return true; + } + }); + return result; + } + }, { + key: '_getSliderOutputPosition', + value: function _getSliderOutputPosition() { + // Update output position + var newPlace, minValue; + + var style = window.getComputedStyle(this.element, null); 
+ // Measure width of range input + var sliderWidth = parseInt(style.getPropertyValue('width'), 10); + + // Figure out placement percentage between left and right of input + if (!this.element.getAttribute('min')) { + minValue = 0; + } else { + minValue = this.element.getAttribute('min'); + } + var newPoint = (this.element.value - minValue) / (this.element.getAttribute('max') - minValue); + + // Prevent bubble from going beyond left or right (unsupported browsers) + if (newPoint < 0) { + newPlace = 0; + } else if (newPoint > 1) { + newPlace = sliderWidth; + } else { + newPlace = sliderWidth * newPoint; + } + + return { + 'position': newPlace + 'px' + }; + } + + /** + * Bind all events + * @method _bindEvents + * @return {void} + */ + + }, { + key: '_bindEvents', + value: function _bindEvents() { + if (this.output) { + // Add event listener to update output when slider value change + this.element.addEventListener('input', this.onSliderInput, false); + } + } + }, { + key: 'onSliderInput', + value: function onSliderInput(e) { + e.preventDefault(); + + if (this.element.classList.contains('has-output-tooltip')) { + // Get new output position + var newPosition = this._getSliderOutputPosition(); + + // Set output position + this.output.style['left'] = newPosition.position; + } + + // Check for prefix and postfix + var prefix = this.output.hasAttribute('data-prefix') ? this.output.getAttribute('data-prefix') : ''; + var postfix = this.output.hasAttribute('data-postfix') ? this.output.getAttribute('data-postfix') : ''; + + // Update output with slider value + this.output.value = prefix + this.element.value + postfix; + + this.emit('bulmaslider:ready', this.element.value); + } + }], [{ + key: 'attach', + value: function attach() { + var _this3 = this; + + var selector = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'input[type="range"].slider'; + var options = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + var instances = new Array(); + + var elements = isString(selector) ? document.querySelectorAll(selector) : Array.isArray(selector) ? selector : [selector]; + elements.forEach(function (element) { + if (typeof element[_this3.constructor.name] === 'undefined') { + var instance = new bulmaSlider(element, options); + element[_this3.constructor.name] = instance; + instances.push(instance); + } else { + instances.push(element[_this3.constructor.name]); + } + }); + + return instances; + } + }]); + + return bulmaSlider; +}(__WEBPACK_IMPORTED_MODULE_0__events__["a" /* default */]); + +/* harmony default export */ __webpack_exports__["default"] = (bulmaSlider); + +/***/ }), +/* 1 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var EventEmitter = function () { + function EventEmitter() { + var listeners = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : []; + + _classCallCheck(this, EventEmitter); + + this._listeners = new Map(listeners); + this._middlewares = new Map(); + } + + _createClass(EventEmitter, [{ + key: "listenerCount", + value: function listenerCount(eventName) { + if (!this._listeners.has(eventName)) { + return 0; + } + + var eventListeners = this._listeners.get(eventName); + return eventListeners.length; + } + }, { + key: "removeListeners", + value: function removeListeners() { + var _this = this; + + var eventName = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + var middleware = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; + + if (eventName !== null) { + if (Array.isArray(eventName)) { + name.forEach(function (e) { + return _this.removeListeners(e, middleware); + }); + } else { + this._listeners.delete(eventName); + + if (middleware) { + this.removeMiddleware(eventName); + } + } + } else { + this._listeners = new Map(); + } + } + }, { + key: "middleware", + value: function middleware(eventName, fn) { + var _this2 = this; + + if (Array.isArray(eventName)) { + name.forEach(function (e) { + return _this2.middleware(e, fn); + }); + } else { + if (!Array.isArray(this._middlewares.get(eventName))) { + this._middlewares.set(eventName, []); + } + + this._middlewares.get(eventName).push(fn); + } + } + }, { + key: "removeMiddleware", + value: function removeMiddleware() { + var _this3 = this; + + var eventName = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + + if (eventName !== null) { + if (Array.isArray(eventName)) { + name.forEach(function (e) { + return _this3.removeMiddleware(e); + }); + } else { + this._middlewares.delete(eventName); + } + } else { + this._middlewares = new Map(); + } + } + }, { + key: "on", + value: function on(name, callback) { + var _this4 = this; + + var once = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : false; + + if (Array.isArray(name)) { + name.forEach(function (e) { + return _this4.on(e, callback); + }); + } else { + name = name.toString(); + var split = name.split(/,|, | /); + + if (split.length > 1) { + split.forEach(function (e) { + return _this4.on(e, callback); + }); + } else { + if (!Array.isArray(this._listeners.get(name))) { + this._listeners.set(name, []); + } + + this._listeners.get(name).push({ once: once, callback: callback }); + } + } + } + }, { + key: "once", + value: function once(name, callback) { + this.on(name, callback, true); + } + }, { + key: "emit", + value: function emit(name, data) { + var _this5 = this; + + var silent = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false; + + name = name.toString(); + var listeners = this._listeners.get(name); + var middlewares = null; + var doneCount = 0; + var execute = silent; + + if (Array.isArray(listeners)) { + listeners.forEach(function (listener, index) { + // Start Middleware checks unless we're doing a silent emit + if (!silent) { + middlewares = _this5._middlewares.get(name); + // Check and execute Middleware + if (Array.isArray(middlewares)) { + middlewares.forEach(function (middleware) { + middleware(data, function () { + var newData = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : null; + + if (newData !== null) { + data = newData; + } + doneCount++; + }, name); + }); + + if (doneCount >= middlewares.length) { + execute = true; + } + } else { + execute = true; + } + } + + // If Middleware checks have been passed, execute + if (execute) { + if (listener.once) { + listeners[index] = null; + } + listener.callback(data); + } + }); + + // Dirty way of removing used Events + while (listeners.indexOf(null) !== -1) { + listeners.splice(listeners.indexOf(null), 1); + } + } + } + }]); + + return EventEmitter; +}(); + +/* harmony default export */ __webpack_exports__["a"] = (EventEmitter); + +/***/ }) +/******/ ])["default"]; +}); \ No newline at end of file diff --git a/docs/static/js/bulma-slider.min.js b/docs/static/js/bulma-slider.min.js new file mode 100644 index 0000000..7e62685 --- /dev/null +++ b/docs/static/js/bulma-slider.min.js @@ -0,0 +1 @@ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.bulmaSlider=e():t.bulmaSlider=e()}("undefined"!=typeof self?self:this,function(){return function(n){var r={};function i(t){if(r[t])return r[t].exports;var e=r[t]={i:t,l:!1,exports:{}};return n[t].call(e.exports,e,e.exports,i),e.l=!0,e.exports}return i.m=n,i.c=r,i.d=function(t,e,n){i.o(t,e)||Object.defineProperty(t,e,{configurable:!1,enumerable:!0,get:n})},i.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return i.d(e,"a",e),e},i.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},i.p="",i(i.s=0)}([function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),n.d(e,"isString",function(){return l});var r=n(1),i=Object.assign||function(t){for(var e=1;e=l.length&&(s=!0)):s=!0),s&&(t.once&&(u[e]=null),t.callback(r))});-1!==u.indexOf(null);)u.splice(u.indexOf(null),1)}}]),e}();e.a=i}]).default}); \ No newline at end of file diff --git 
a/docs/static/js/explorer-index.js b/docs/static/js/explorer-index.js new file mode 100644 index 0000000..b5c8d25 --- /dev/null +++ b/docs/static/js/explorer-index.js @@ -0,0 +1,168 @@ +$(document).ready(function() { + // Check for click events on the navbar burger icon + $(".navbar-burger").click(function() { + // Toggle the "is-active" class on both the "navbar-burger" and the "navbar-menu" + $(".navbar-burger").toggleClass("is-active"); + $(".navbar-menu").toggleClass("is-active"); + + }); + + var options = { + slidesToScroll: 1, + slidesToShow: 1, + loop: true, + infinite: true, + autoplay: false, + autoplaySpeed: 3000, + } + + // Initialize all div with carousel class + var carousels = bulmaCarousel.attach('.carousel', options); + + // Loop on each carousel initialized + for(var i = 0; i < carousels.length; i++) { + // Add listener to event + carousels[i].on('before:show', state => { + console.log(state); + }); + } + + // Access to bulmaCarousel instance of an element + var element = document.querySelector('#my-element'); + if (element && element.bulmaCarousel) { + // bulmaCarousel instance is available as element.bulmaCarousel + element.bulmaCarousel.on('before-show', function(state) { + console.log(state); + }); + } + + var dropdowns = document.getElementsByClassName('dropdown'); + for (let dropdown of dropdowns) { + dropdown.addEventListener('click', function(event) { + event.stopPropagation(); + event.preventDefault(); + dropdown.classList.toggle('is-active'); + }); + } + + // load and display default models + let qids = getRandomSubarray(num_output_qs); + let [folder , output_data] = read_data('Multimodal Bard'); + output_data.addEventListener('load', function() { + refresh_table(qids); + }); + [folder , output_data] = read_data('CoT GPT4 (Caption+OCR)'); + output_data.addEventListener('load', function() { + refresh_table(qids); + }); + // refresh_table(qids); + let dropdown_displays = document.getElementsByClassName('dropdown-display'); + let 
refresh_button = document.getElementById('refresh-qids'); + refresh_button.addEventListener('click', function(event) { + qids = getRandomSubarray(num_output_qs); + refresh_table(qids); + }); + + // let dropdown_displays = document.getElementsByClassName('dropdown-display'); + let dropdown_contents = document.getElementsByClassName('dropdown-content'); + for (let i = 0; i < dropdown_contents.length; i++) { + // add an tag to the dropdown-content for each key in model_output_folder_list + let dropdown_content = dropdown_contents[i]; + for (let name in model_output_folder_list) { + let a = document.createElement('a'); + a.classList.add('dropdown-item'); + a.innerHTML = ' ' + name + ' '; + dropdown_content.appendChild(a); + a.addEventListener('click', function(event) { + dropdown_displays[i].innerHTML = name; + let [folder, script_tag] = read_data(name); + script_tag.addEventListener('load', function() { + refresh_table(qids); + }); + }); + a.style.padding = '0.375em 1em'; + } + } + + // // create the leaderboard + // let leaderboard = new Tabulator("#score-table", { + // data:score_table, //assign data to table + // layout:"fitDataTable", + // // layout:"fitColumns", + // initialSort:[ + // {column:"ALL", dir:"desc"}, //sort by this first + // ], + // autoColumns:true, //create columns from data field names + // }); +}) + +var cache = {}; +var num_output_qs = 5; +// var + +// dynamically links a js data file +function read_data(model_name) { + console.log('loading data for ' + model_name); + let folder = model_output_folder_list[model_name]; + // dynamically link the js file + let script = document.createElement('script'); + script.src = './data/results/' + folder + '/data.js'; + document.body.appendChild(script); + return [folder, script]; + +} + +function getRandomSubarray(size, arr=null) { + if (arr == null) { + arr = []; + for (let i = 1; i < 1001; i++) { + arr.push(i); + } + } + var shuffled = arr.slice(0), i = arr.length, temp, index; + while (i--) { + index = 
Math.floor((i + 1) * Math.random()); + temp = shuffled[index]; + shuffled[index] = shuffled[i]; + shuffled[i] = temp; + } + return shuffled.slice(0, size); +} + +function refresh_table(qids) { + let table = document.getElementById('result-table'); + let dropdown_displays = document.getElementsByClassName('dropdown-display'); + let model_names = []; + for (let i = 0; i < dropdown_displays.length; i++) { + model_names.push(dropdown_displays[i].innerText); + } + console.log(qids); + console.log(model_names); + while (table.children.length > 3) + table.removeChild(table.lastChild); + + for (let qid of qids) { + let row = generate_row(qid, model_names); + // console.log('inserting' + row); + table.insertAdjacentHTML('beforeend', row); + } +} + +function generate_row(qid, model_names) { + let responses = []; + for (let model_name of model_names) { + if (model_name in cache) + responses.push(cache[model_name][qid.toString()]); + else + responses.push({'response': ''}); + } + let html = ` +
+
+ ${create_number(test_data[qid.toString()])} +
+
${responses[0]['response']}
+
${responses[1]['response']}
+
`; + return html; +} \ No newline at end of file diff --git a/docs/static/js/fontawesome.all.min.js b/docs/static/js/fontawesome.all.min.js new file mode 100644 index 0000000..9ee22fd --- /dev/null +++ b/docs/static/js/fontawesome.all.min.js @@ -0,0 +1,5 @@ +/*! + * Font Awesome Free 5.15.1 by @fontawesome - https://fontawesome.com + * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) + */ +!function(){"use strict";var c={},l={};try{"undefined"!=typeof window&&(c=window),"undefined"!=typeof document&&(l=document)}catch(c){}var h=(c.navigator||{}).userAgent,z=void 0===h?"":h,a=c,v=l,m=(a.document,!!v.documentElement&&!!v.head&&"function"==typeof v.addEventListener&&v.createElement,~z.indexOf("MSIE")||z.indexOf("Trident/"),"___FONT_AWESOME___"),e=function(){try{return!0}catch(c){return!1}}();var s=a||{};s[m]||(s[m]={}),s[m].styles||(s[m].styles={}),s[m].hooks||(s[m].hooks={}),s[m].shims||(s[m].shims=[]);var t=s[m];function M(c,z){var l=(2>>0;h--;)l[h]=c[h];return l}function Ac(c){return c.classList?bc(c.classList):(c.getAttribute("class")||"").split(" ").filter(function(c){return c})}function gc(c,l){var h,z=l.split("-"),a=z[0],v=z.slice(1).join("-");return a!==c||""===v||(h=v,~T.indexOf(h))?null:v}function Sc(c){return"".concat(c).replace(/&/g,"&").replace(/"/g,""").replace(/'/g,"'").replace(//g,">")}function yc(h){return Object.keys(h||{}).reduce(function(c,l){return c+"".concat(l,": ").concat(h[l],";")},"")}function wc(c){return c.size!==Lc.size||c.x!==Lc.x||c.y!==Lc.y||c.rotate!==Lc.rotate||c.flipX||c.flipY}function Zc(c){var l=c.transform,h=c.containerWidth,z=c.iconWidth,a={transform:"translate(".concat(h/2," 256)")},v="translate(".concat(32*l.x,", ").concat(32*l.y,") "),m="scale(".concat(l.size/16*(l.flipX?-1:1),", ").concat(l.size/16*(l.flipY?-1:1),") "),e="rotate(".concat(l.rotate," 0 0)");return{outer:a,inner:{transform:"".concat(v," ").concat(m," 
").concat(e)},path:{transform:"translate(".concat(z/2*-1," -256)")}}}var kc={x:0,y:0,width:"100%",height:"100%"};function xc(c){var l=!(1").concat(m.map(Jc).join(""),"")}var $c=function(){};function cl(c){return"string"==typeof(c.getAttribute?c.getAttribute(cc):null)}var ll={replace:function(c){var l=c[0],h=c[1].map(function(c){return Jc(c)}).join("\n");if(l.parentNode&&l.outerHTML)l.outerHTML=h+(lc.keepOriginalSource&&"svg"!==l.tagName.toLowerCase()?"\x3c!-- ".concat(l.outerHTML," Font Awesome fontawesome.com --\x3e"):"");else if(l.parentNode){var z=document.createElement("span");l.parentNode.replaceChild(z,l),z.outerHTML=h}},nest:function(c){var l=c[0],h=c[1];if(~Ac(l).indexOf(lc.replacementClass))return ll.replace(c);var z=new RegExp("".concat(lc.familyPrefix,"-.*"));delete h[0].attributes.style,delete h[0].attributes.id;var a=h[0].attributes.class.split(" ").reduce(function(c,l){return l===lc.replacementClass||l.match(z)?c.toSvg.push(l):c.toNode.push(l),c},{toNode:[],toSvg:[]});h[0].attributes.class=a.toSvg.join(" ");var v=h.map(function(c){return Jc(c)}).join("\n");l.setAttribute("class",a.toNode.join(" ")),l.setAttribute(cc,""),l.innerHTML=v}};function hl(c){c()}function zl(h,c){var z="function"==typeof c?c:$c;if(0===h.length)z();else{var l=hl;lc.mutateApproach===y&&(l=o.requestAnimationFrame||hl),l(function(){var c=!0===lc.autoReplaceSvg?ll.replace:ll[lc.autoReplaceSvg]||ll.replace,l=_c.begin("mutate");h.map(c),l(),z()})}}var al=!1;function vl(){al=!1}var ml=null;function el(c){if(t&&lc.observeMutations){var a=c.treeCallback,v=c.nodeCallback,m=c.pseudoElementsCallback,l=c.observeMutationsRoot,h=void 0===l?C:l;ml=new t(function(c){al||bc(c).forEach(function(c){if("childList"===c.type&&0 { + console.log(state); + }); + } + + // Access to bulmaCarousel instance of an element + var element = document.querySelector('#my-element'); + if (element && element.bulmaCarousel) { + // bulmaCarousel instance is available as element.bulmaCarousel + 
element.bulmaCarousel.on('before-show', function(state) { + console.log(state); + }); + } + + /*var player = document.getElementById('interpolation-video'); + player.addEventListener('loadedmetadata', function() { + $('#interpolation-slider').on('input', function(event) { + console.log(this.value, player.duration); + player.currentTime = player.duration / 100 * this.value; + }) + }, false);*/ + preloadInterpolationImages(); + + $('#interpolation-slider').on('input', function(event) { + setInterpolationImage(this.value); + }); + setInterpolationImage(0); + $('#interpolation-slider').prop('max', NUM_INTERP_FRAMES - 1); + + bulmaSlider.attach(); + +}) diff --git a/docs/static/js/question_card.js b/docs/static/js/question_card.js new file mode 100644 index 0000000..a3bc76a --- /dev/null +++ b/docs/static/js/question_card.js @@ -0,0 +1,83 @@ +// let BASE_DIR = './data'; +let BASE_DIR = 'https://raw.githubusercontent.com/mathvista/data/main'; + +function create_number(data) { + let question = make_qt(data.question, data.unit); + + // let hint = make_hint(data.hint) + let image = ""; + if (data.image !== -1) + // image = make_img(`${BASE_DImetadataR}/${filters.dataset}/${data.image}`); + image = make_img(`${BASE_DIR}/${data.image}`); + + let choices = ""; + if (data.question_type === "multi_choice") + choices = make_choices(data.choices); + + // if data has the answer attr. + let answer = ""; + if ("answer" in data) + answer = make_answer(data.answer); + + html = make_box([question, image, choices, answer]); + + return html; +} + +// creates a div with question text in it +function make_qt(question, unit) { + let html = ""; + if (unit === null) + html = ` +

Question

+

${question}

+ `; + else + html = ` +

Question

+

${question} (unit: ${unit})

+ `; + return html; +} + +function make_img(path) { + if (path === null) return ""; + let html = `number image`; + return html; +} + +function make_box(contents, cls = "") { + if (contents.join("").length === 0) return ""; + let html = ` +
+ ${contents.join(" ")} +
+ `; + return html; +} + +function make_choices(choices) { + // console.log(choices); + let temp = ""; + let len = 0; + for (each of choices) { + let html = make_choice(each); + temp += html; + len += each.length; + } + let html = ""; + if (len < 60) + html = `

Choices

${temp}
`; + else + html = `

Choices

${temp}
`; + return html; +} +function make_choice(choice) { + let html = `

${choice}

`; + return html; +} + +function make_answer(answer) { + let html = `

Answer

${answer}

`; + return html; +} \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..8c9c3a5 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,39 @@ +accelerate==0.21.0 +backoff==2.2.1 +bitsandbytes==0.38.1 +git+https://github.com/openai/CLIP.git +decord==0.6.0 +dill==0.3.6 +einops==0.6.0 +ftfy==6.1.1 +h5py==3.8.0 +inflect==7.2.0 +ipython==8.11.0 +ipykernel==6.22.0 +jupyter==1.0.0 +joblib==1.2.0 +kornia==0.6.9 +matplotlib==3.6.2 +nltk==3.8.1 +num2words==0.5.12 +numpy==1.23.5 +omegaconf==2.3.0 +openai==0.28.0 +pandas==1.5.2 +Pillow==9.4.0 +prettytable==3.6.0 +pycocotools==2.0.6 +PyYAML==6.0 +qd==0.8.9 +regex==2022.10.31 +requests==2.28.1 +rich==13.3.2 +scipy==1.9.3 +tensorboardX==2.6 +tensorflow==2.11.1 +timm==0.6.12 +tqdm==4.64.1 +transformers==4.39.3 +wandb==0.13.9 +word2number==1.1 +yacs==0.1.8 \ No newline at end of file diff --git a/vdebugger/finetune.py b/vdebugger/finetune.py new file mode 100644 index 0000000..268fbab --- /dev/null +++ b/vdebugger/finetune.py @@ -0,0 +1,734 @@ +#!/usr/bin/env python +# coding=utf-8 + +import argparse +import logging +import math +import os +import random +from datetime import timedelta +from functools import partial + +import datasets +import deepspeed +import torch +import transformers +from accelerate import Accelerator +from accelerate.logging import get_logger +from accelerate.utils import set_seed, InitProcessGroupKwargs +from datasets import load_dataset +from peft import LoraConfig, TaskType, get_peft_model, prepare_model_for_kbit_training +from torch.utils.data import DataLoader +from tqdm.auto import tqdm +from transformers import ( + AutoConfig, + AutoModelForCausalLM, + AutoTokenizer, + LlamaTokenizer, + LlamaTokenizerFast, + CodeLlamaTokenizer, + CodeLlamaTokenizerFast, + SchedulerType, + DataCollatorForSeq2Seq, + get_scheduler, + GPTNeoXTokenizerFast, + GPT2Tokenizer, + OPTForCausalLM, + BitsAndBytesConfig, +) + +logger = get_logger(__name__) + + +# try: +# from 
hf_olmo import OLMoTokenizerFast +# except ImportError: +# logger.warning("OLMo not installed. Ignore if using a different model.") + +def parse_args(): + parser = argparse.ArgumentParser(description="Finetune a transformers model on a causal language modeling task") + parser.add_argument( + "--dataset_name", + type=str, + default=None, + help="The name of the dataset to use (via the datasets library).", + ) + parser.add_argument( + "--dataset_config_name", + type=str, + default=None, + help="The configuration name of the dataset to use (via the datasets library).", + ) + parser.add_argument( + "--train_file", type=str, default=None, help="A csv or a json file containing the training data." + ) + parser.add_argument( + "--model_name_or_path", + type=str, + help="Path to pretrained model or model identifier from huggingface.co/models.", + required=False, + ) + parser.add_argument( + "--config_name", + type=str, + default=None, + help="Pretrained config name or path if not the same as model_name", + ) + parser.add_argument( + "--use_lora", + action="store_true", + help="If passed, will use LORA (low-rank parameter-efficient training) to train the model.", + ) + parser.add_argument( + "--lora_rank", + type=int, + default=64, + help="The rank of lora.", + ) + parser.add_argument( + "--lora_alpha", + type=float, + default=16, + help="The alpha parameter of lora.", + ) + parser.add_argument( + "--lora_dropout", + type=float, + default=0.1, + help="The dropout rate of lora modules.", + ) + parser.add_argument( + "--use_flash_attn", + action="store_true", + help="If passed, will use flash attention to train the model.", + ) + parser.add_argument( + "--tokenizer_name", + type=str, + default=None, + help="Pretrained tokenizer name or path if not the same as model_name", + ) + parser.add_argument( + "--use_slow_tokenizer", + action="store_true", + help="If passed, will use a slow tokenizer (not backed by the 🤗 Tokenizers library).", + ) + parser.add_argument( + 
"--max_seq_length", + type=int, + default=512, + help="The maximum total sequence length (prompt+completion) of each training example.", + ) + parser.add_argument( + "--per_device_train_batch_size", + type=int, + default=8, + help="Batch size (per device) for the training dataloader.", + ) + parser.add_argument( + "--learning_rate", + type=float, + default=5e-5, + help="Initial learning rate (after the potential warmup period) to use.", + ) + parser.add_argument("--weight_decay", type=float, default=0.0, help="Weight decay to use.") + parser.add_argument("--num_train_epochs", type=int, default=3, help="Total number of training epochs to perform.") + parser.add_argument( + "--max_train_steps", + type=int, + default=None, + help="Total number of training steps to perform. If provided, overrides num_train_epochs.", + ) + parser.add_argument( + "--gradient_accumulation_steps", + type=int, + default=1, + help="Number of updates steps to accumulate before performing a backward/update pass.", + ) + parser.add_argument( + "--lr_scheduler_type", + type=SchedulerType, + default="linear", + help="The scheduler type to use.", + choices=["linear", "cosine", "cosine_with_restarts", "polynomial", "constant", "constant_with_warmup"], + ) + parser.add_argument( + "--warmup_ratio", type=float, default=0, help="Ratio of total training steps used for warmup." 
+ ) + parser.add_argument("--output_dir", type=str, default=None, help="Where to store the final model.") + parser.add_argument("--seed", type=int, default=None, help="A seed for reproducible training.") + parser.add_argument( + "--preprocessing_num_workers", + type=int, + default=None, + help="The number of processes to use for the preprocessing.", + ) + parser.add_argument( + "--overwrite_cache", action="store_true", help="Overwrite the cached training and evaluation sets" + ) + parser.add_argument( + "--checkpointing_steps", + type=str, + default=None, + help="Whether the various states should be saved at the end of every n steps, or 'epoch' for each epoch.", + ) + parser.add_argument( + "--logging_steps", + type=int, + default=None, + help="Log the training loss and learning rate every logging_steps steps.", + ) + parser.add_argument( + "--resume_from_checkpoint", + type=str, + default=None, + help="If the training should continue from a checkpoint folder.", + ) + parser.add_argument( + "--with_tracking", + action="store_true", + help="Whether to enable experiment trackers for logging.", + ) + parser.add_argument( + "--report_to", + type=str, + default="all", + help=( + 'The integration to report the results and logs to. Supported platforms are `"tensorboard"`,' + ' `"wandb"`, `"comet_ml"` and `"clearml"`. Use `"all"` (default) to report to all integrations.' + "Only applicable when `--with_tracking` is passed." + ), + ) + parser.add_argument( + "--low_cpu_mem_usage", + action="store_true", + help=( + "It is an option to create the model as an empty shell, then only materialize its parameters when the pretrained weights are loaded." + "If passed, LLM loading time and RAM consumption will be benefited." + ), + ) + parser.add_argument( + "--gradient_checkpointing", + action="store_true", + help=( + "Turn on gradient checkpointing. Saves memory but slows training." 
+ ), + ) + parser.add_argument( + "--use_qlora", + action="store_true", + help=( + "Use qLoRA training - main thing is initialising model in quantised form. Not compatible with deepspeed." + ), + ) + parser.add_argument( + '--clip_grad_norm', + type=float, + default=-1, + help='Clip gradient norm. Not compatible with deepspeed (use deepspeed config instead).', + ) + parser.add_argument( + '--use_8bit_optimizer', + action='store_true', + help='Use 8bit optimizer from bitsandbytes. Not compatible with deepspeed (use deepspeed config instead).', + ) + parser.add_argument( + '--timeout', + type=int, + default=1800, + help='Timeout for the training process. Useful if tokenization process is long. Default is 1800 seconds (30 minutes).', + ) + parser.add_argument( + '--trust_remote_code', + action='store_true', + help='Trust remote code when loading pretrained models and tokenizers. Use only when you trust the remote code.', + ) + parser.add_argument( + '--reduce_loss', + default='mean', + choices=['mean', 'sum'], + help='How to reduce loss over tokens. Default is mean, but using sum can improve chat model performance.', + ) + args = parser.parse_args() + + # Sanity checks + if args.dataset_name is None and args.train_file is None: + raise ValueError("Need either a dataset name or a training file.") + else: + if args.train_file is not None: + extension = args.train_file.split(".")[-1] + assert extension in ["json", "jsonl"], "`train_file` should be a json/jsonl file." + return args + + +def encode_with_prompt_completion_format(example, tokenizer, max_seq_length): + ''' + Here we assume each example has 'prompt' and 'completion' fields. + We concatenate prompt and completion and tokenize them together because otherwise prompt will be padded/trancated + and it doesn't make sense to follow directly with the completion. 
+ ''' + tokenized_prompt = [tokenizer.bos_token_id, ] + tokenizer(example['prompt'], add_special_tokens=False).input_ids + tokenized_inst = [] if example['inst'] is None else tokenizer(example['inst'], add_special_tokens=False).input_ids + tokenized_completion = tokenizer(example['completion'], add_special_tokens=False).input_ids + if example.get('eos', True) is not False: + tokenized_completion += [tokenizer.eos_token_id, ] + # assert tokenized_prompt + tokenized_inst + tokenized_completion == \ # fine... + # tokenizer(example['prompt'] + example['inst'] + example['completion']).input_ids + + assert len(tokenized_inst + tokenized_completion) < max_seq_length + tokenized_prompt = tokenized_prompt[: max_seq_length - 256 - len(tokenized_inst)] + # assumes max output length is 256 + input_ids = tokenized_prompt + tokenized_inst + tokenized_completion + labels = [-100, ] * len(tokenized_prompt) + tokenized_inst + tokenized_completion + + input_ids = torch.LongTensor(input_ids) + labels = torch.LongTensor(labels) + attention_mask = torch.ones_like(input_ids) + return { + 'input_ids': input_ids, + 'labels': labels, + 'attention_mask': attention_mask, + } + + +def save_with_accelerate(accelerator, model, tokenizer, output_dir, args): + unwrapped_model = accelerator.unwrap_model(model) + # When doing multi-gpu training, we need to use accelerator.get_state_dict(model) to get the state_dict. + # Otherwise, sometimes the model will be saved with only part of the parameters. + # Also, accelerator needs to use the wrapped model to get the state_dict. + state_dict = accelerator.get_state_dict(model) + if args.use_lora: + # When using lora, the unwrapped model is a PeftModel, which doesn't support the is_main_process + # and has its own save_pretrained function for only saving lora modules. + # We have to manually specify the is_main_process outside the save_pretrained function. 
        if accelerator.is_main_process:
            unwrapped_model.save_pretrained(output_dir, state_dict=state_dict)
    else:
        # don't use safetensors for saving for now
        unwrapped_model.save_pretrained(
            output_dir, is_main_process=accelerator.is_main_process, save_function=accelerator.save,
            state_dict=state_dict,
            safe_serialization=False
        )


def main():
    # Entry point for fine-tuning: parse CLI args, set up Accelerate +
    # logging, then load data and model. (Definition continues past this chunk.)
    args = parse_args()

    # Initialize the accelerator. We will let the accelerator handle device placement for us in this example.
    # If we're using tracking, we also need to initialize it here and it will by default pick up all supported trackers
    # in the environment
    accelerator_log_kwargs = {}

    if args.with_tracking:
        accelerator_log_kwargs["log_with"] = args.report_to
        accelerator_log_kwargs["project_dir"] = args.output_dir

    # if you get timeouts (e.g. due to long tokenization) increase this.
    timeout_kwargs = InitProcessGroupKwargs(timeout=timedelta(seconds=args.timeout))

    accelerator = Accelerator(
        gradient_accumulation_steps=args.gradient_accumulation_steps,
        **accelerator_log_kwargs,
        kwargs_handlers=[timeout_kwargs]
    )
    # Make one log on every process with the configuration for debugging.
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        datefmt="%m/%d/%Y %H:%M:%S",
        level=logging.INFO,
    )
    logger.info(accelerator.state, main_process_only=False)
    # Quiet the datasets/transformers loggers on non-main ranks so the console
    # shows one copy of each message.
    if accelerator.is_local_main_process:
        datasets.utils.logging.set_verbosity_warning()
        transformers.utils.logging.set_verbosity_info()
    else:
        datasets.utils.logging.set_verbosity_error()
        transformers.utils.logging.set_verbosity_error()

    # If passed along, set the training seed now.
    if args.seed is not None:
        set_seed(args.seed)

    # Only the main process creates the output directory; the barrier below
    # keeps other ranks from racing ahead before it exists.
    if accelerator.is_main_process:
        if args.output_dir is not None:
            os.makedirs(args.output_dir, exist_ok=True)

    accelerator.wait_for_everyone()

    if args.dataset_name is not None:
        # Downloading and loading a dataset from the hub.
+ raw_datasets = load_dataset( + args.dataset_name, + args.dataset_config_name, + ) + else: + data_files = {} + dataset_args = {} + if args.train_file is not None: + data_files["train"] = args.train_file + raw_datasets = load_dataset( + "json", + data_files=data_files, + **dataset_args, + ) + + # Load pretrained model and tokenizer + if args.config_name: + config = AutoConfig.from_pretrained(args.config_name, trust_remote_code=args.trust_remote_code) + elif args.model_name_or_path: + config = AutoConfig.from_pretrained(args.model_name_or_path, trust_remote_code=args.trust_remote_code) + else: + raise ValueError( + "You are instantiating a new config instance from scratch. This is not supported by this script." + ) + + if args.tokenizer_name: + tokenizer = AutoTokenizer.from_pretrained(args.tokenizer_name, trust_remote_code=args.trust_remote_code, + use_fast=not args.use_slow_tokenizer) + elif args.model_name_or_path: + tokenizer = AutoTokenizer.from_pretrained(args.model_name_or_path, trust_remote_code=args.trust_remote_code, + use_fast=not args.use_slow_tokenizer) + else: + raise ValueError( + "You are instantiating a new tokenizer from scratch. This is not supported by this script." + "You can do it from another script, save it, and load it from here, using --tokenizer_name." + ) + + if args.model_name_or_path: + if args.use_qlora: + bnb_config = BitsAndBytesConfig( + load_in_4bit=True, + bnb_4bit_use_double_quant=True, + bnb_4bit_quant_type="nf4", + bnb_4bit_compute_dtype=torch.bfloat16, + ) + device_index = accelerator.local_process_index + device_map = {"": device_index} # force data-parallel training. 
+ model = AutoModelForCausalLM.from_pretrained( + args.model_name_or_path, + from_tf=bool(".ckpt" in args.model_name_or_path), + config=config, + load_in_4bit=True, + quantization_config=bnb_config, + device_map=device_map, + trust_remote_code=args.trust_remote_code, + torch_dtype=torch.bfloat16, + use_flash_attention_2=True if args.use_flash_attn else False, + ) + else: + model = AutoModelForCausalLM.from_pretrained( + args.model_name_or_path, + from_tf=bool(".ckpt" in args.model_name_or_path), + config=config, + trust_remote_code=args.trust_remote_code, + low_cpu_mem_usage=args.low_cpu_mem_usage, + use_flash_attention_2=True if args.use_flash_attn else False, + torch_dtype=torch.bfloat16, + ) + else: + logger.info("Training new model from scratch") + model = AutoModelForCausalLM.from_config(config) + + # no default pad token for llama! + # here we add all special tokens again, because the default ones are not in the special_tokens_map + if isinstance(tokenizer, LlamaTokenizer) or isinstance(tokenizer, LlamaTokenizerFast) or \ + isinstance(tokenizer, CodeLlamaTokenizer) or isinstance(tokenizer, CodeLlamaTokenizerFast): + num_added_tokens = tokenizer.add_special_tokens({ + "bos_token": "", + "eos_token": "", + "unk_token": "", + "pad_token": "", + }) + assert num_added_tokens in [0, 1], \ + "LlamaTokenizer should only add one special token - the pad_token, or no tokens if pad token present." + elif isinstance(tokenizer, GPTNeoXTokenizerFast): + num_added_tokens = tokenizer.add_special_tokens({ + "pad_token": "", + }) + assert num_added_tokens == 1, "GPTNeoXTokenizer should only add one special token - the pad_token." 
+ elif isinstance(tokenizer, GPT2Tokenizer) and isinstance(model, OPTForCausalLM): + num_added_tokens = tokenizer.add_special_tokens({'unk_token': ''}) + # elif isinstance(tokenizer, OLMoTokenizerFast): + # # only the eos for olmo, but we use it as bos + # tokenizer.bos_token = tokenizer.eos_token + # assert args.add_bos, "For OLMo, you must add bos token to the beginning of the input sequence." + + # We resize the embeddings only when necessary to avoid index errors. If you are creating a model from scratch + # on a small vocab and want a smaller embedding size, remove this test. + # gather deepspeed to get "real" embedding size + embeddings = model.get_input_embeddings() + with deepspeed.zero.GatheredParameters(embeddings.weight, modifier_rank=None): + embedding_size = embeddings.weight.shape[0] + if len(tokenizer) > embeddings.weight.shape[0]: + model.resize_token_embeddings(len(tokenizer)) + + if args.use_lora: + if args.use_qlora: + model = prepare_model_for_kbit_training(model, use_gradient_checkpointing=args.gradient_checkpointing) + + logger.info("Initializing LORA model...") + peft_config = LoraConfig( + task_type=TaskType.CAUSAL_LM, + inference_mode=False, + r=args.lora_rank, + lora_alpha=args.lora_alpha, + lora_dropout=args.lora_dropout, + target_modules=["q_proj", "o_proj", "v_proj", "k_proj", "gate_proj", "up_proj", "down_proj"] + ) + model = get_peft_model(model, peft_config) + model.print_trainable_parameters() + + # Preprocessing the datasets. 
+ assert "prompt" in raw_datasets["train"].column_names and "completion" in raw_datasets["train"].column_names + encode_function = partial( + encode_with_prompt_completion_format, + tokenizer=tokenizer, + max_seq_length=args.max_seq_length, + ) + + with accelerator.main_process_first(): + lm_datasets = raw_datasets.map( + encode_function, + batched=False, + num_proc=args.preprocessing_num_workers, + load_from_cache_file=not args.overwrite_cache, + remove_columns=[name for name in raw_datasets["train"].column_names if + name not in ["input_ids", "labels", "attention_mask"]], + desc="Tokenizing and reformatting instruction data", + ) + lm_datasets.set_format(type="pt") + lm_datasets = lm_datasets.filter(lambda example: (example['labels'] != -100).any()) + + train_dataset = lm_datasets["train"] + + # Log a few random samples from the training set: + for index in random.sample(range(len(train_dataset)), 3): + logger.info(f"Sample {index} of the training set: {train_dataset[index]}.") + + # DataLoaders creation: + train_dataloader = DataLoader( + train_dataset, + shuffle=True, + collate_fn=DataCollatorForSeq2Seq(tokenizer=tokenizer, model=model, padding="longest"), + batch_size=args.per_device_train_batch_size + ) + + # Optimizer + # Split weights in two groups, one with weight decay and the other not. 
    # Parameters are split into decayed vs. non-decayed groups by substring
    # match on the parameter name.
    no_decay = ["bias", "layer_norm.weight"]
    # NOTE(review): "layer_norm.weight" does not match Llama-style norm names
    # (e.g. "input_layernorm.weight"), so on Llama models norm weights land in
    # the decayed group — confirm whether that is intended.
    optimizer_grouped_parameters = [
        {
            "params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
            "weight_decay": args.weight_decay,
        },
        {
            "params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)],
            "weight_decay": 0.0,
        },
    ]
    if args.use_qlora:
        # Paged AdamW from bitsandbytes; 8-bit states only when requested.
        from bitsandbytes.optim import AdamW
        optimizer = AdamW(
            optimizer_grouped_parameters,
            lr=args.learning_rate,
            optim_bits=8 if args.use_8bit_optimizer else 32,
            is_paged=True
        )
    else:
        optimizer = torch.optim.AdamW(optimizer_grouped_parameters, lr=args.learning_rate)

    # Scheduler and math around the number of training steps.
    # If --max_train_steps was not given, derive it from epochs * per-epoch
    # optimizer updates (dataloader length here is pre-accelerator.prepare).
    overrode_max_train_steps = False
    num_update_steps_per_epoch = math.ceil(len(train_dataloader) / args.gradient_accumulation_steps)
    if args.max_train_steps is None:
        args.max_train_steps = args.num_train_epochs * num_update_steps_per_epoch
        overrode_max_train_steps = True

    # Create the learning rate scheduler.
    # Note: the current accelerator.step() calls the .step() of the real scheduler for the `num_processes` times. This is because they assume
    # the user initialize the scheduler with the entire training set. In the case of data parallel training, each process only
    # sees a subset (1/num_processes) of the training set. So each time the process needs to update the lr multiple times so that the total
    # number of updates in the end matches the num_training_steps here.
    # Here we need to set the num_training_steps to either using the entire training set (when epochs is specified) or we need to multiply the
    # num_training_steps by num_processes so that the total number of updates matches the num_training_steps.
    num_training_steps_for_scheduler = args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes
    lr_scheduler = get_scheduler(
        name=args.lr_scheduler_type,
        optimizer=optimizer,
        num_training_steps=num_training_steps_for_scheduler,
        num_warmup_steps=int(num_training_steps_for_scheduler * args.warmup_ratio),
    )

    # Prepare everything with `accelerator`.
    model, optimizer, train_dataloader, lr_scheduler = accelerator.prepare(
        model, optimizer, train_dataloader, lr_scheduler
    )

    # We need to recalculate our total training steps as the size of the training dataloader may have changed.
    num_update_steps_per_epoch = math.ceil(len(train_dataloader) / args.gradient_accumulation_steps)
    if overrode_max_train_steps:
        args.max_train_steps = args.num_train_epochs * num_update_steps_per_epoch
    # Afterwards we recalculate our number of training epochs
    args.num_train_epochs = math.ceil(args.max_train_steps / num_update_steps_per_epoch)

    # Figure out how many steps we should save the Accelerator states
    # --checkpointing_steps is either an integer (every N steps) or "epoch".
    checkpointing_steps = args.checkpointing_steps
    if checkpointing_steps is not None and checkpointing_steps.isdigit():
        checkpointing_steps = int(checkpointing_steps)

    # We need to initialize the trackers we use, and also store our configuration.
    # The trackers initializes automatically on the main process.
    if args.with_tracking:
        experiment_config = vars(args)
        # TensorBoard cannot log Enums, need the raw value
        experiment_config["lr_scheduler_type"] = experiment_config["lr_scheduler_type"].value
        accelerator.init_trackers("open_instruct", experiment_config)

    # Train!
+ total_batch_size = args.per_device_train_batch_size * accelerator.num_processes * args.gradient_accumulation_steps + + logger.info("***** Running training *****") + logger.info(f" Num examples = {len(train_dataset)}") + logger.info(f" Num Epochs = {args.num_train_epochs}") + logger.info(f" Instantaneous batch size per device = {args.per_device_train_batch_size}") + logger.info(f" Total train batch size (w. parallel, distributed & accumulation) = {total_batch_size}") + logger.info(f" Gradient Accumulation steps = {args.gradient_accumulation_steps}") + logger.info(f" Total optimization steps = {args.max_train_steps}") + # Only show the progress bar once on each machine. + progress_bar = tqdm(range(args.max_train_steps), disable=not accelerator.is_local_main_process) + completed_steps = 0 + starting_epoch = 0 + + # Potentially load in the weights and states from a previous save + if args.resume_from_checkpoint: + if args.resume_from_checkpoint is not None or args.resume_from_checkpoint != "": + checkpoint_path = args.resume_from_checkpoint + path = os.path.basename(args.resume_from_checkpoint) + else: + # Get the most recent checkpoint + dirs = [f.name for f in os.scandir(os.getcwd()) if f.is_dir()] + dirs.sort(key=os.path.getctime) + path = dirs[ + -1 + ] # Sorts folders by date modified, most recent checkpoint is the last + checkpoint_path = path + path = os.path.basename(checkpoint_path) + + accelerator.print(f"Resumed from checkpoint: {checkpoint_path}") + accelerator.load_state(path) + # Extract `epoch_{i}` or `step_{i}` + training_difference = os.path.splitext(path)[0] + + if "epoch" in training_difference: + starting_epoch = int(training_difference.replace("epoch_", "")) + 1 + resume_step = None + completed_steps = starting_epoch * num_update_steps_per_epoch + else: + # need to multiply `gradient_accumulation_steps` to reflect real steps + resume_step = ( + int(training_difference.replace("step_", "")) + * args.gradient_accumulation_steps + ) + starting_epoch 
= resume_step // len(train_dataloader) + completed_steps = resume_step // args.gradient_accumulation_steps + resume_step -= starting_epoch * len(train_dataloader) + + # update the progress_bar if load from checkpoint + progress_bar.update(completed_steps) + + for epoch in range(starting_epoch, args.num_train_epochs): + model.train() + total_loss = 0 + if ( + args.resume_from_checkpoint + and epoch == starting_epoch + and resume_step is not None + ): + # We skip the first `n` batches in the dataloader when resuming from a checkpoint + active_dataloader = accelerator.skip_first_batches( + train_dataloader, resume_step + ) + else: + active_dataloader = train_dataloader + for step, batch in enumerate(active_dataloader): + with accelerator.accumulate(model): + outputs = model(**batch, use_cache=False) + if args.reduce_loss == 'mean': + loss = outputs.loss + else: + # reduce loss is sum + # this ensures that we weight all tokens in the dataset equally, + # rather than weighting each overall example equally when + # using high amounts of gradient accumulation. + # this can result in > 5 point improvements in AlpacaEval + # see https://github.com/huggingface/transformers/issues/24725 for + # more discussion and details. + logits = outputs.logits + labels = batch["labels"] + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = torch.nn.CrossEntropyLoss(reduction='sum') + shift_logits = shift_logits.view(-1, embedding_size) + shift_labels = shift_labels.view(-1) + # Enable model parallelism + shift_labels = shift_labels.to(shift_logits.device) + loss = loss_fct(shift_logits, shift_labels) + # We keep track of the loss at each logged step + total_loss += loss.detach().float() + accelerator.backward(loss) + # clip gradient norm. 
                    # don't do this with deepspeed (use the deepspeed config's
                    # own gradient clipping instead)
                    if accelerator.sync_gradients and args.clip_grad_norm > 0:
                        accelerator.clip_grad_norm_(model.parameters(), args.clip_grad_norm)
                    optimizer.step()
                    optimizer.zero_grad()
                    lr_scheduler.step()

                # Checks if the accelerator has performed an optimization step behind the scenes
                if accelerator.sync_gradients:
                    progress_bar.update(1)
                    completed_steps += 1
                    if args.logging_steps and completed_steps % args.logging_steps == 0:
                        # Average the accumulated sum of per-micro-batch losses
                        # across processes, accumulation steps, and the logging window.
                        avg_loss = accelerator.gather(
                            total_loss).mean().item() / args.gradient_accumulation_steps / args.logging_steps
                        logger.info(f" Step: {completed_steps}, LR: {lr_scheduler.get_last_lr()[0]}, Loss: {avg_loss}")
                        if args.with_tracking:
                            accelerator.log(
                                {
                                    "learning_rate": lr_scheduler.get_last_lr()[0],
                                    "train_loss": avg_loss,
                                },
                                step=completed_steps,
                            )
                        total_loss = 0

                    if isinstance(checkpointing_steps, int):
                        if completed_steps % checkpointing_steps == 0:
                            output_dir = f"step_{completed_steps}"
                            if args.output_dir is not None:
                                output_dir = os.path.join(args.output_dir, output_dir)
                            save_with_accelerate(accelerator, model, tokenizer, output_dir, args)

                    if completed_steps >= args.max_train_steps:
                        break

        # Per-epoch checkpoint when --checkpointing_steps epoch was requested.
        if args.checkpointing_steps == "epoch":
            output_dir = f"epoch_{epoch}"
            if args.output_dir is not None:
                output_dir = os.path.join(args.output_dir, output_dir)
            save_with_accelerate(accelerator, model, tokenizer, output_dir, args)

    if args.with_tracking:
        accelerator.end_training()

    # Final save: tokenizer from the main process only, model via the
    # accelerate-aware helper (handles LoRA vs. full-model saving).
    if args.output_dir is not None:
        accelerator.wait_for_everyone()
        if accelerator.is_main_process:
            tokenizer.save_pretrained(args.output_dir)
        save_with_accelerate(accelerator, model, tokenizer, args.output_dir, args)


if __name__ == "__main__":
    main()
diff --git a/vdebugger/infer_critic.py b/vdebugger/infer_critic.py
new file mode 100644
index 0000000..b284241
--- /dev/null
+++ b/vdebugger/infer_critic.py
#!/usr/bin/env python
# coding=utf-8
import argparse
+import ast +import os + +import pandas as pd +import sklearn.metrics +import torch +from dump_data import datasets +from transformers import AutoTokenizer +from vllm import LLM, SamplingParams + +from my_datasets import process_result + + +def parse(code): + try: + return ast.unparse(ast.parse(code)) + except: + return None + + +@torch.no_grad() +def main(args): + if os.path.exists(args.model): + output_fname = os.path.join(args.model, args.output_fname) + assert not os.path.exists(output_fname) + print("Dump to", output_fname) + + tokenizer = AutoTokenizer.from_pretrained('codellama/CodeLlama-7b-Python-hf') + model = LLM(args.model, tokenizer='codellama/CodeLlama-7b-Python-hf', + tensor_parallel_size=torch.cuda.device_count()) + + sampling_params = SamplingParams(temperature=0, max_tokens=256) + + dataset = datasets[args.dataset] + data = pd.read_csv(args.input) + input_ids = [] + labels = [] + for i, row in data.iterrows(): + q1, q2 = row['query'].splitlines() + code = row['code'] + if not code.startswith(q2): + code = q2 + code + code_ = parse(code) + if code_ is not None: + code = code_ + + custom_trace = row['traced'] + if pd.isna(custom_trace): + error = row['error'] + assert 'CompileTimeError' in error + custom_trace = 'Compile Error' + info = "\n\n-> {}\n\n--- Trace\n\n{}".format(row['result'], custom_trace) + + prompt = "# {}\n{}{}".format(q1, code, info) + + tokenized_prompt = [tokenizer.bos_token_id, ] + tokenizer(prompt, add_special_tokens=False).input_ids + tokenized_inst = tokenizer('\n\n# Program is', add_special_tokens=False).input_ids + tokenized_prompt = tokenized_prompt[: 1024 - 256 - len(tokenized_inst)] + input_ids.append(tokenized_prompt + tokenized_inst) + labels.append(int(dataset.accuracy([process_result(row['result']), ], [row['answer'], ]))) + + generation = model.generate(prompt_token_ids=input_ids, sampling_params=sampling_params) + generation = [g.outputs[0].text for g in generation] + preds = [x.strip().startswith("right") for x in 
generation] + + print("acc = {}".format(sklearn.metrics.accuracy_score(labels, preds))) + print("confusion matrix:") + print(sklearn.metrics.confusion_matrix(labels, preds)) + + data.insert(len(data.keys()), 'critic', '') + data['critic'] = generation + data.to_csv(output_fname) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('model') + parser.add_argument('--input', required=True) + parser.add_argument('--output-fname', default='critic-infer.csv') + parser.add_argument('--dataset', default='gqa') + args = parser.parse_args() + + main(args) diff --git a/vdebugger/infer_refine.py b/vdebugger/infer_refine.py new file mode 100644 index 0000000..f8dea66 --- /dev/null +++ b/vdebugger/infer_refine.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +# coding=utf-8 +import argparse +import ast +import os + +import numpy as np +import pandas as pd +import torch +from transformers import AutoTokenizer +from vllm import LLM, SamplingParams + + +def parse(code): + try: + return ast.unparse(ast.parse(code)) + except: + return None + + +@torch.no_grad() +def main(args): + if os.path.exists(args.model): + output_fname = os.path.join(args.refine, args.output_fname) + assert not os.path.exists(output_fname) + print("Dump to", output_fname) + + tokenizer = AutoTokenizer.from_pretrained('codellama/CodeLlama-7b-Python-hf') + model = LLM(args.refine, tokenizer='codellama/CodeLlama-7b-Python-hf', + tensor_parallel_size=torch.cuda.device_count()) + + sampling_params = SamplingParams(temperature=0, max_tokens=256) + + critic_outputs = pd.read_csv(args.critic) + inferred_inds = [] + input_ids = [] + for i in range(len(critic_outputs)): + if isinstance(critic_outputs['critic'][i], str) and \ + not critic_outputs['critic'][i].strip().startswith('right'): + q1, q2 = critic_outputs['query'][i].splitlines() + try: + code = 'def execute_command' + critic_outputs['critic'][i].strip().split('def execute_command')[1] + except: + print("Invalid critic generation at 
%d:" % i) + print(critic_outputs['critic'][i]) + continue + + if '# Program is' in code: + code = code.split("# Program is")[0].strip() # errr, an awkward fix + if args.info == 'trace': + info = '\n\n-> {}\n\n--- Trace\n\n{}'.format( + critic_outputs['result'][i], critic_outputs['traced'][i]) + else: + info = '' + prompt = "# {}\n{}{}".format(q1, code, info) + tokenized_prompt = [tokenizer.bos_token_id, ] + tokenizer(prompt, add_special_tokens=False).input_ids + tokenized_inst = tokenizer('\n\n# Correction', add_special_tokens=False).input_ids + tokenized_prompt = tokenized_prompt[: 1024 - 256 - len(tokenized_inst)] + inferred_inds.append(i) + input_ids.append(tokenized_prompt + tokenized_inst) + + if args.drop_unchanged: + critic_outputs['code'] = '[' + + generation = model.generate(prompt_token_ids=input_ids, sampling_params=sampling_params) + generation = [g.outputs[0].text.strip() for g in generation] + critic_outputs.loc[np.array(inferred_inds), "code"] = generation + critic_outputs.to_csv(output_fname) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('critic') + parser.add_argument('refine') + parser.add_argument('--output-fname', default='critic-refine-infer.csv') + parser.add_argument('--drop-unchanged', default=False, action='store_true') + args = parser.parse_args() + + main(args) diff --git a/vdebugger/my_datasets/__init__.py b/vdebugger/my_datasets/__init__.py new file mode 100644 index 0000000..f440e36 --- /dev/null +++ b/vdebugger/my_datasets/__init__.py @@ -0,0 +1,35 @@ +from .gqa import GQADataset +from .vsr import VSRDataset +from .tallyqa import TallyQADataset +from .covr import COVRDataset +from .refcoco import RefCOCODataset +from .nlvr import NLVRDataset + + +def process_result(x): + class ImagePatch: + def __init__(self, left, right, upper, lower, *args, **kwargs): + self.left = left + self.right = right + self.upper = upper + self.lower = lower + + self.height = self.upper - self.lower + self.width = 
self.right - self.left + self.horizontal_center = (self.left + self.right) / 2 + self.vertical_center = (self.lower + self.upper) / 2 + + def __repr__(self): + return "ImagePatch(left={}, right={}, upper={}, lower={}, height={}, width={}, horizontal_center={}, vertical_center={})".format( + self.left, self.right, self.upper, self.lower, self.height, self.width, + self.horizontal_center, self.vertical_center, + ) + + # if x == 'None': # that doesn't really make sense + # return None + if isinstance(x, str) and x.startswith("ImagePatch"): + try: + return eval(x) + except: + print("Weird or invalid ImagePatch:", x) + return x diff --git a/vdebugger/my_datasets/covr.py b/vdebugger/my_datasets/covr.py new file mode 100644 index 0000000..58c2315 --- /dev/null +++ b/vdebugger/my_datasets/covr.py @@ -0,0 +1,136 @@ +import re + +from .utils import general_postprocessing + + +class COVRDataset: + def __init__(self): + self.input_type = 'image_list' + + # For evaluation + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", + "couldnt": "couldn't", "couldn'tve": "couldn't've", "couldnt've": "couldn't've", + "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", + "hed": "he'd", "hed've": "he'd've", "he'dve": "he'd've", "hes": "he's", "howd": "how'd", + "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", "Im": "I'm", + "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", + "itll": "it'll", "let's": "let's", "maam": "ma'am", "mightnt": "mightn't", + "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", + "oclock": "o'clock", "oughtnt": "oughtn't", "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", + "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", 
"she'dve": "she'd've", + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", + "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", "somebody'd": "somebodyd", + "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", + "somebodyll": "somebody'll", "somebodys": "somebody's", "someoned": "someone'd", + "someoned've": "someone'd've", "someone'dve": "someone'd've", "someonell": "someone'll", + "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", + "thered": "there'd", "thered've": "there'd've", "there'dve": "there'd've", + "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", + "twas": "'twas", "wasnt": "wasn't", "wed've": "we'd've", "we'dve": "we'd've", + "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", + "wheres": "where's", "whereve": "where've", "whod": "who'd", "whod've": "who'd've", + "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", + "whyll": "why'll", "whyre": "why're", "whys": "why's", "wont": "won't", + "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", + "yall'd've": "y'all'd've", "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", + "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", "youll": "you'll", + "youre": "you're", "youve": "you've"} + self.manualMap = {'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = 
re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + def accuracy(self, prediction, ground_truth, *args, **kwargs): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. + """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p) == self.post_process_gt(g): + score += 1 + return score / len(prediction) + + def post_process_gt(self, x): + if x == 'False' or x == 'True': + x = eval(x) + if isinstance(x, bool): + if x: + return 'yes' + else: + return 'no' + return str(x) + + def post_process(self, prediction, stem=True): + """ + Code from https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py, + as indicated here https://okvqa.allenai.org/leaderboard.html + :return: + """ + if prediction is None: + return None + + prediction = general_postprocessing(prediction) + + prediction = prediction.replace('\n', ' ') + prediction = prediction.replace('\t', ' ') + prediction = prediction.strip() + prediction = self.processPunctuation(prediction) + prediction = self.processDigitArticle(prediction) + return prediction + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", outText, re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in 
self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText diff --git a/vdebugger/my_datasets/gqa.py b/vdebugger/my_datasets/gqa.py new file mode 100644 index 0000000..f38d073 --- /dev/null +++ b/vdebugger/my_datasets/gqa.py @@ -0,0 +1,126 @@ +import re + +from .utils import general_postprocessing + + +class GQADataset: + def __init__(self): + self.input_type = 'image' + + # For evaluation + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", + "couldnt": "couldn't", "couldn'tve": "couldn't've", "couldnt've": "couldn't've", + "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", + "hed": "he'd", "hed've": "he'd've", "he'dve": "he'd've", "hes": "he's", "howd": "how'd", + "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", "Im": "I'm", + "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", + "itll": "it'll", "let's": "let's", "maam": "ma'am", "mightnt": "mightn't", + "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", + "oclock": "o'clock", "oughtnt": "oughtn't", "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", + "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", + "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", "somebody'd": "somebodyd", + "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", + "somebodyll": "somebody'll", "somebodys": "somebody's", "someoned": "someone'd", + "someoned've": "someone'd've", "someone'dve": "someone'd've", "someonell": 
"someone'll", + "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", + "thered": "there'd", "thered've": "there'd've", "there'dve": "there'd've", + "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", + "twas": "'twas", "wasnt": "wasn't", "wed've": "we'd've", "we'dve": "we'd've", + "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", + "wheres": "where's", "whereve": "where've", "whod": "who'd", "whod've": "who'd've", + "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", + "whyll": "why'll", "whyre": "why're", "whys": "why's", "wont": "won't", + "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", + "yall'd've": "y'all'd've", "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", + "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", "youll": "you'll", + "youre": "you're", "youve": "you've"} + self.manualMap = {'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') 
+ else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", outText, re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText + + def post_process(self, prediction, stem=True): + """ + Code from https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py, + as indicated here https://okvqa.allenai.org/leaderboard.html + :return: + """ + if prediction is None: + return None + + prediction = general_postprocessing(prediction) + + prediction = prediction.replace('\n', ' ') + prediction = prediction.replace('\t', ' ') + prediction = prediction.strip() + prediction = self.processPunctuation(prediction) + prediction = self.processDigitArticle(prediction) + return prediction + + def accuracy(self, prediction, ground_truth, *args, **kwargs): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. 
+ """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p) == g: + score += 1 + return score / len(prediction) diff --git a/vdebugger/my_datasets/nlvr.py b/vdebugger/my_datasets/nlvr.py new file mode 100644 index 0000000..9c3f0f8 --- /dev/null +++ b/vdebugger/my_datasets/nlvr.py @@ -0,0 +1,126 @@ +import re + +from .utils import general_postprocessing + + +class NLVRDataset: + def __init__(self): + self.input_type = 'image_dict' + + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", + "couldnt": "couldn't", "couldn'tve": "couldn't've", "couldnt've": "couldn't've", + "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", + "hed": "he'd", "hed've": "he'd've", "he'dve": "he'd've", "hes": "he's", "howd": "how'd", + "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", "Im": "I'm", + "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", + "itll": "it'll", "let's": "let's", "maam": "ma'am", "mightnt": "mightn't", + "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", + "oclock": "o'clock", "oughtnt": "oughtn't", "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", + "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", + "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", "somebody'd": "somebodyd", + "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", + "somebodyll": "somebody'll", "somebodys": "somebody's", "someoned": "someone'd", + "someoned've": "someone'd've", "someone'dve": 
"someone'd've", "someonell": "someone'll", + "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", + "thered": "there'd", "thered've": "there'd've", "there'dve": "there'd've", + "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", + "twas": "'twas", "wasnt": "wasn't", "wed've": "we'd've", "we'dve": "we'd've", + "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", + "wheres": "where's", "whereve": "where've", "whod": "who'd", "whod've": "who'd've", + "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", + "whyll": "why'll", "whyre": "why're", "whys": "why's", "wont": "won't", + "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", + "yall'd've": "y'all'd've", "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", + "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", "youll": "you'll", + "youre": "you're", "youve": "you've"} + self.manualMap = {'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + + def accuracy(self, prediction, ground_truth, *args, **kwargs): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. 
+ Returns: + score (float): Score of the prediction. + """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p) == g: + score += 1 + return score / len(prediction) + + def post_process(self, prediction, stem=True): + """ + Code from https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py, + as indicated here https://okvqa.allenai.org/leaderboard.html + :return: + """ + if prediction is None: + return None + + prediction = general_postprocessing(prediction) + + prediction = prediction.replace('\n', ' ') + prediction = prediction.replace('\t', ' ') + prediction = prediction.strip() + prediction = self.processPunctuation(prediction) + prediction = self.processDigitArticle(prediction) + return prediction + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", outText, re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText diff --git a/vdebugger/my_datasets/refcoco.py b/vdebugger/my_datasets/refcoco.py new file mode 100644 index 0000000..89824e9 --- /dev/null +++ b/vdebugger/my_datasets/refcoco.py @@ -0,0 +1,48 @@ +import torch +from torchvision.ops import box_iou + + +class RefCOCODataset: + def __init__(self): + self.input_type = 'image' + + @classmethod + def accuracy(cls, prediction, 
ground_truth, *args, return_iou=False): + """ + Compute IoU score + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. It is an IoU score + """ + assert len(prediction) == len(ground_truth) + num_samples = 0 + iou = 0 + acc = 0 + for p, g in zip(prediction, ground_truth): + num_samples += 1 + try: + if p is None: + continue # take iou as 0 + + if type(p) == list: + p = torch.tensor(p)[None] + elif type(p) == str: + p = torch.tensor([float(x) for x in p.split('(')[1].split(')')[0].split(',')])[None] + else: + p = torch.tensor([p.left, p.lower, p.right, p.upper])[None] + if type(g) == str: + g = [float(x) for x in g.split('[')[1].split(']')[0].split(',')] + g = torch.tensor([g[0], g[1], g[2], g[3]])[None] + iou_ = box_iou(p, g).item() # Expects (x1, y1, x2, y2) format. So (left, lower, right, upper) + iou += iou_ + if iou_ > 0.7: + acc += 1 + except Exception as e: + pass # If the prediction is not a box, we consider iou = 0 + + if return_iou: + return iou / max(num_samples, 1), acc / max(num_samples, 1) + else: + return acc / max(num_samples, 1) # just return acc diff --git a/vdebugger/my_datasets/tallyqa.py b/vdebugger/my_datasets/tallyqa.py new file mode 100644 index 0000000..b18af3d --- /dev/null +++ b/vdebugger/my_datasets/tallyqa.py @@ -0,0 +1,33 @@ +from word2number import w2n + + +class TallyQADataset: + def __init__(self): + self.input_type = 'image' + + def post_process(self, prediction): + prediction = str(prediction).strip() + try: + return int(prediction) + except: + try: + return w2n(prediction) + except: + return None + + def accuracy(self, prediction, ground_truth, *args, **kwargs): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. 
+ """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p) == g: + score += 1 + return score / len(prediction) diff --git a/vdebugger/my_datasets/utils.py b/vdebugger/my_datasets/utils.py new file mode 100644 index 0000000..1d361c6 --- /dev/null +++ b/vdebugger/my_datasets/utils.py @@ -0,0 +1,62 @@ +""" +Data loaders +Adapted in part from https://github.com/phiyodr/vqaloader/blob/master/vqaloader/loaders.py +""" + +import torch + + +def general_postprocessing(prediction): + try: + if type(prediction).__name__ == 'ImagePatch': + prediction = prediction.classify_object() + + if isinstance(prediction, list): + prediction = prediction[0] if len(prediction) > 0 else "no" + + if isinstance(prediction, torch.Tensor): + prediction = prediction.item() + # if prediction is None: + # prediction = "no" + if isinstance(prediction, bool): + if prediction: + prediction = "yes" + else: + prediction = "no" + elif isinstance(prediction, int): + prediction = str(prediction) + # print("No answer is a number, so this will be wrong") + except: + prediction = str(prediction) + + prediction = str(prediction) + + prediction = prediction.replace('\n', ' ') + prediction = prediction.replace('\t', ' ') + prediction = prediction.strip() + prediction = prediction.lower() + + if prediction == 'true': + prediction = 'yes' + elif prediction == 'false': + prediction = 'no' + return prediction + + +def accuracy(prediction, ground_truth, *args): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. 
+ """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + pred_gt_filtered = [(pred, gt) for pred, gt in zip(prediction, ground_truth) if gt != ''] + score = 0 + for p, g in pred_gt_filtered: + if general_postprocessing(p) == g: + score += 1 + return score / len(pred_gt_filtered) diff --git a/vdebugger/my_datasets/vsr.py b/vdebugger/my_datasets/vsr.py new file mode 100644 index 0000000..b38b145 --- /dev/null +++ b/vdebugger/my_datasets/vsr.py @@ -0,0 +1,23 @@ +class VSRDataset: + def __init__(self): + self.input_type = 'image' + + def post_process(self, prediction): + return {'yes': True, 'no': False}.get(prediction, None) + + def accuracy(self, prediction, ground_truth, *args, **kwargs): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. + """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p) == g: + score += 1 + return score / len(prediction) diff --git a/vdebugger/training_scripts/train_critic.sh b/vdebugger/training_scripts/train_critic.sh new file mode 100644 index 0000000..40859f3 --- /dev/null +++ b/vdebugger/training_scripts/train_critic.sh @@ -0,0 +1,38 @@ +MODEL_SIZE=7b +NUM_GPUS=4 +BATCH_SIZE_PER_GPU=1 +TOTAL_BATCH_SIZE=128 +GRADIENT_ACC_STEPS=$(($TOTAL_BATCH_SIZE/$NUM_GPUS/$BATCH_SIZE_PER_GPU)) +if [ $NUM_GPUS != "$( python -c 'import torch; print(torch.cuda.device_count())' )" ]; then + echo "Hasn't set gpu right" + exit +fi +echo "Training llama model ${MODEL_SIZE} using $NUM_GPUS GPUs, $BATCH_SIZE_PER_GPU batch size per GPU, $GRADIENT_ACC_STEPS gradient accumulation steps" + +accelerate launch \ + --main_process_port 23466 \ + --mixed_precision bf16 \ + --num_machines 1 \ + --num_processes $NUM_GPUS \ + --use_deepspeed \ + 
--deepspeed_config_file ds_configs/stage3_offloading_accelerate.conf \ + open_instruct/finetune.py \ + --model_name_or_path codellama/CodeLlama-${MODEL_SIZE}-Python-hf \ + --use_flash_attn \ + --gradient_checkpointing \ + --tokenizer_name codellama/CodeLlama-${MODEL_SIZE}-Python-hf \ + --use_slow_tokenizer \ + --train_file CRITIQUE_DATA/train-mix.json \ + --max_seq_length 1024 \ + --preprocessing_num_workers 16 \ + --per_device_train_batch_size $BATCH_SIZE_PER_GPU \ + --gradient_accumulation_steps $GRADIENT_ACC_STEPS \ + --learning_rate 2e-5 \ + --lr_scheduler_type linear \ + --warmup_ratio 0.03 \ + --weight_decay 0. \ + --num_train_epochs 3 \ + --output_dir outputs/critique_${MODEL_SIZE}/ \ + --with_tracking \ + --report_to tensorboard \ + --logging_steps 1 \ No newline at end of file diff --git a/vdebugger/training_scripts/train_refine.sh b/vdebugger/training_scripts/train_refine.sh new file mode 100644 index 0000000..dcf8f06 --- /dev/null +++ b/vdebugger/training_scripts/train_refine.sh @@ -0,0 +1,38 @@ +MODEL_SIZE=7b +NUM_GPUS=4 +BATCH_SIZE_PER_GPU=2 +TOTAL_BATCH_SIZE=128 +GRADIENT_ACC_STEPS=$(($TOTAL_BATCH_SIZE/$NUM_GPUS/$BATCH_SIZE_PER_GPU)) +if [ $NUM_GPUS != "$( python -c 'import torch; print(torch.cuda.device_count())' )" ]; then + echo "Hasn't set gpu right" + exit +fi +echo "Training llama model ${MODEL_SIZE} using $NUM_GPUS GPUs, $BATCH_SIZE_PER_GPU batch size per GPU, $GRADIENT_ACC_STEPS gradient accumulation steps" + +accelerate launch \ + --main_process_port 23464 \ + --mixed_precision bf16 \ + --num_machines 1 \ + --num_processes $NUM_GPUS \ + --use_deepspeed \ + --deepspeed_config_file ds_configs/stage3_no_offloading_accelerate.conf \ + open_instruct/finetune.py \ + --model_name_or_path codellama/CodeLlama-${MODEL_SIZE}-Python-hf \ + --use_flash_attn \ + --gradient_checkpointing \ + --tokenizer_name codellama/CodeLlama-${MODEL_SIZE}-Python-hf \ + --use_slow_tokenizer \ + --train_file REFINE_DATA/train.json \ + --max_seq_length 1024 \ + 
--preprocessing_num_workers 16 \ + --per_device_train_batch_size $BATCH_SIZE_PER_GPU \ + --gradient_accumulation_steps $GRADIENT_ACC_STEPS \ + --learning_rate 2e-5 \ + --lr_scheduler_type linear \ + --warmup_ratio 0.03 \ + --weight_decay 0. \ + --num_train_epochs 3 \ + --output_dir outputs/refine_${MODEL_SIZE}/ \ + --with_tracking \ + --report_to tensorboard \ + --logging_steps 1 \ No newline at end of file diff --git a/viper/GLIP b/viper/GLIP new file mode 160000 index 0000000..d405181 --- /dev/null +++ b/viper/GLIP @@ -0,0 +1 @@ +Subproject commit d40518186100d86eff8718ecd09755c7a0418186 diff --git a/viper/LICENSE b/viper/LICENSE new file mode 100644 index 0000000..5685918 --- /dev/null +++ b/viper/LICENSE @@ -0,0 +1,399 @@ +Attribution-NonCommercial 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. 
+ + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. 
More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-NonCommercial 4.0 International Public +License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-NonCommercial 4.0 International Public License ("Public +License"). To the extent this Public License may be interpreted as a +contract, You are granted the Licensed Rights in consideration of Your +acceptance of these terms and conditions, and the Licensor grants You +such rights in consideration of benefits the Licensor receives from +making the Licensed Material available under these terms and +conditions. + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. 
For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. NonCommercial means not primarily intended for or directed towards + commercial advantage or monetary compensation. For purposes of + this Public License, the exchange of the Licensed Material for + other material subject to Copyright and Similar Rights by digital + file-sharing or similar means is NonCommercial provided there is + no payment of monetary compensation in connection with the + exchange. + + j. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + k. 
Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + l. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part, for NonCommercial purposes only; and + + b. produce, reproduce, and Share Adapted Material for + NonCommercial purposes only. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. 
Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties, including when + the Licensed Material is used other than for NonCommercial + purposes. + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. 
If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database for NonCommercial purposes + only; + + b. 
if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. 
The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. 
To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. 
\ No newline at end of file diff --git a/viper/README.md b/viper/README.md new file mode 100644 index 0000000..2753ab7 --- /dev/null +++ b/viper/README.md @@ -0,0 +1,168 @@ +# ViperGPT: Visual Inference via Python Execution for Reasoning + +This is the code for the paper [ViperGPT: Visual Inference via Python Execution for Reasoning](https://viper.cs.columbia.edu) by [Dídac Surís](https://www.didacsuris.com/)\*, [Sachit Menon](https://sachit-menon.github.io/)\* and [Carl Vondrick](https://www.cs.columbia.edu/~vondrick/). + +## Quickstart +Clone recursively: +```bash +git clone --recurse-submodules https://github.com/cvlab-columbia/viper.git +``` + +After cloning: +```bash +cd viper +export PATH=/usr/local/cuda/bin:$PATH +bash setup.sh # This may take a while. Make sure the vipergpt environment is active +cd GLIP +python setup.py clean --all build develop --user +cd .. +echo YOUR_OPENAI_API_KEY_HERE > api.key +``` +Then you can start exploring with the `main_simple.ipynb` notebook. For running on datasets instead of individual +examples, use `main_batch.py` as discussed later on. + +> :warning: WARNING: ViperGPT runs code generated by a large language model. We do not have direct control over this +> code, so it can be dangerous to run it, especially if modifications to the API are made (the current prompts do not +> have any dangerous functions like interaction with the filesystem, so it is unlikely that any malicious code can be +> generated). We cannot guarantee that the code is safe, so use at your own risk, or run in a sandboxed environment. +> For this reason, the default `execute_code` parameter in the config is `False`. Set it to `True` if you would like the +> generated code to be executed automatically in `main_batch.py`, otherwise you can execute it yourself (as in +> `main_simple.ipynb`). + + +> :information_source: NOTE: OpenAI discontinued support for the Codex API on March 23rd, 2023. 
This repository implements +> GPT-3.5 Turbo and GPT-4 as alternatives, but we have not tested them extensively; as they are chat models and not completion, their behavior likely differs. + +## Detailed Installation +The easiest way to get started exploring ViperGPT is through `main_simple.ipynb`. To run it, you will need to do the following: +1. Clone this repository with its submodules. +2. Install the dependencies. See the see [Dependencies](#Dependencies). +3. Download two pretrained models (the rest are downloaded automatically). See [Pretrained models](#Pretrained-models). +4. Set up the OpenAI key. See [OpenAI key](#OpenAI-key). + +### Cloning this Repo + +```bash +git clone --recurse-submodules https://github.com/cvlab-columbia/viper.git +``` + +### Dependencies + +First, create a conda environment using `setup_env.sh` and then install our modified version of GLIP. +To do so, just `cd` into the `viper` directory, and run: + +```bash +export PATH=/usr/local/cuda/bin:$PATH +bash setup_env.sh +conda activate vipergpt +cd GLIP +python setup.py clean --all build develop --user +``` + +Please make sure to install GLIP as described (i.e., from our provided repo) as we have updated the CUDA kernels to be +compatible with newer versions of PyTorch, which are required for other models. + +### Pretrained models + +Note that ViperGPT may inherit biases from the pretrained models it uses. These biases may be reflected in the outputs +generated by our model. It is recommended to consider this potential bias when using ViperGPT and interpreting its +outputs. + +This repository implements more models than the ones described in the paper, which can be useful for further research. +Most of the implemented modules automatically download the pretrained models. However, there are four models that +need to be downloaded manually, if they are to be used. 
They have to be stored in the same directory +`/path/to/pretrained_models`, by default `./pretrained_models/`, which has to be specified in the configuration (see [Configuration](#Configuration)). + +We provide the convenience script `download_models.sh` to perform this download for you; you can set the variable $PRETRAINED_MODEL_PATH to match your config's `/path/to/pretrained_models/`. + +#### Pretrained model system requirements + +Many of the models used are very large, and require quite a bit of GPU memory. In particular, GLIP and BLIP2 are especially large. Please use smaller variants of those models if running on hardware that cannot support the larger ones; however, this comes at the expense of performance. + +### OpenAI key + +To run the OpenAI models, you will need to configure an OpenAI key. This can be done by signing up for an account [e.g. here](https://platform.openai.com/), and then creating a key in [account/api-keys](https://platform.openai.com/account/api-keys). +**Create a file `api.key` and store the key in it.** + +## Running the code + +Once the previous steps are done, you can run the Jupyter Notebook `main_simple.ipynb`. This notebook contains +the code to try ViperGPT on your own images. The notebook is well documented, and it describes how to use the code. + +## Dataset + +You can run ViperGPT on a pre-defined set of query-image/video pairs as well. In order to do that, you will have to +create a `queries.csv` file, which contains the queries and the filenames for the corresponding images/videos. The format of the file is +`query,answer,image_name/video_name`. The answer is optional, and only needed for evaluation. See `data` for an example. + +Your dataset directory will contain the `queries.csv` file as well as the images/videos in the `images`/`videos` +directory. Add the path to the dataset directory in the configuration (see [Configuration](#Configuration)). 
+ +## Configuration + +All the configuration parameters are defined in `configs/base_config.yaml`. In order to run the code, +modify the paths in the parameters `path_pretrained_models` and optionally `dataset.data_path` to point to the correct +directories. + +For every new configuration you need to run, create a new yaml file in the `configs` directory (like `my_config.yaml`), +and modify the parameters you need to change. The parameters in the new file will overwrite +the ones in `base_config.yaml`. Any number of configuration files can be specified, they will be merged in the order +they are specified in the command line. + +The `multiprocessing` parameter refers to *both* the batch (every sample is run by a different worker) and the models +(every model runs in its own process). + +## Running the code on a dataset, without the Jupyter notebook + +The code can be run using the following command: + +```bash +CONFIG_NAMES=your_config_name python main_batch_generate.py +``` + +`CONFIG_NAMES` is an environment variable that specifies the configuration files to use. + +If you want to run the code using multiprocessing, set `multiprocessing: True` in the config file. + +It is especially important to consider the risks of executing arbitrary code when running in a batch; in particular, if you modify the API or any inputs to Codex, be mindful to not include potentially damaging abilities such as file modification/deletion. + +## Code structure + +The code is prepared to run in a multiprocessing manner, from two points of view. First, it runs the models in parallel, +meaning that each pretrained model runs in its own process. Second, it runs the samples in parallel, meaning that +several workers are created to run the samples for a given batch. There is a producer-consumer queuing mechanism where +the processes controlling the models are the consumers of inputs coming from the workers that run each sample +(producer). 
Our implementation allows for batching of samples, which means that several workers can send their inputs to +the same model process, which will run them as a batch, and return the output to each worker separately. + +The code has comments and docstrings, but here is a brief overview of the code structure: +- `vision_models.py`: Contains the code for the pretrained models. Each one of them is a subclass of `BaseModel`. +Implementing a new model is easy. Just create a new class that inherits from `BaseModel` and implement the `forward` +method, as well as the `name` method. The latter will be used to call the model. +- `vision_processes.py`: Acts as a bridge between the models and the rest of the code. It contains the code to start +all the required processes, whether multiprocessing or not. It automatically detects all the new models implemented in +`vision_models.py`. It defines a `forward` method that takes a name as input (as well as arguments), and calls the +appropriate model. +- `main_batch.py` and `main_simple.ipynb`: These are the main files to run the code. The former runs the whole dataset and +is suited for parallel processing of samples, while the latter runs a single image/video and is suited for debugging. +- `image_patch.py` and `video_segment.py`: These are the classes that represent the image patches and video segments. +They contain all the methods that call the `forward` method of `vision_processes.py` and therefore call the models. +- `configs`: Directory containing the configuration files. The configuration files are in YAML format, and read using +OmegaConf. +- `datasets`: Directory containing the code for the datasets. The datasets are subclasses of `torch.utils.data.Dataset`. +- `prompts`: Directory containing the prompts for Codex and GPT-3. The Codex ones define the API specifications. +- `utils.py`, `useful_lists` and `base_models`: Auxiliary files containing useful functions, lists and pretrained model +implementations. 
+ +## Citation + +If you use this code, please consider citing the paper as: + +``` +@article{surismenon2023vipergpt, + title={ViperGPT: Visual Inference via Python Execution for Reasoning}, + author={D\'idac Sur\'is and Sachit Menon and Carl Vondrick}, + journal={arXiv preprint arXiv:2303.08128}, + year={2023} +} +``` \ No newline at end of file diff --git a/viper/api.key b/viper/api.key new file mode 100644 index 0000000..e69de29 diff --git a/viper/configs/base_config.yaml b/viper/configs/base_config.yaml new file mode 100644 index 0000000..b32b179 --- /dev/null +++ b/viper/configs/base_config.yaml @@ -0,0 +1,72 @@ +multiprocessing: False # Run the models and samples in parallel +path_pretrained_models: './pretrained_models' # Path to the pretrained models +execute_code: False # Execute the code after generating it. Only applies to main_batch + +dataset: # Dataset configuration + dataset_name: 'MyDataset' # Dataset name + data_path: 'data' # Dataset path + split: '' # Dataset split. If '', it assumes there is only one split + max_samples: # Maximum number of samples to load + batch_size: 20 # Batch size + start_sample: 0 # Start sample index. 
Only used if max_samples is not None + +load_models: # Which pretrained models to load + maskrcnn: False + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + gpt3_general: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +detect_thresholds: # Thresholds for the models that perform detection + glip: 0.5 + maskrcnn: 0.8 + owlvit: 0.1 +ratio_box_area_to_image_area: 0.0 # Any detected patch under this size will not be returned +crop_larger_margin: True # Increase size of crop by 10% to include more context + +verify_property: # Parameters for verify_property + model: xvlm # Model to use for verify_property + thresh_clip: 0.6 + thresh_tcl: 0.25 + thresh_xvlm: 0.6 + +best_match_model: xvlm # Which model to use for best_[image, text]_match + +gpt3: # GPT-3 configuration + n_votes: 1 # Number of tries to use for GPT-3. Use with temperature > 0 + qa_prompt: ./prompts/gpt3/gpt3_qa.txt + guess_prompt: ./prompts/gpt3/gpt3_process_guess.txt + temperature: 0. # Temperature for GPT-3. Almost deterministic if 0 + model: text-davinci-003 # See openai.Model.list() for available models + +codex: + temperature: 0. # Temperature for Codex. (Almost) deterministic if 0 + best_of: 1 # Number of tries to choose from. Use when temperature > 0 + max_tokens: 512 # Maximum number of tokens to generate for Codex + prompt: ./prompts/chatapi.prompt # Codex prompt file, which defines the API. (doesn't support video for now due to token limits) + model: gpt-3.5-turbo # Codex model to use. [code-davinci-002, gpt-3.5-turbo, gpt-4]. 
See openai.Model.list() for available models + +# Saving and loading parameters +save: True # Save the results to a file +save_new_results: True # If False, overwrite the results file +results_dir: ./results/ # Directory to save the results +use_cache: True # Use cache for the models that support it (now, GPT-3) +clear_cache: False # Clear stored cache +use_cached_codex: False # Use previously-computed Codex results +cached_codex_path: '' # Path to the csv results file from which to load Codex results +log_every: 100 # Log accuracy every n batches +wandb: False # Use Weights and Biases + +blip_half_precision: True # Use 8bit (Faster but slightly less accurate) for BLIP if True +blip_v2_model_type: blip2-flan-t5-xxl # Which model to use for BLIP-2 + +use_fixed_code: False # Use a fixed code for all samples (do not generate with Codex) +fixed_code_file: ./prompts/fixed_code/blip2.prompt # Path to the fixed code file diff --git a/viper/configs/execute/covr.yaml b/viper/configs/execute/covr.yaml new file mode 100644 index 0000000..9e31649 --- /dev/null +++ b/viper/configs/execute/covr.yaml @@ -0,0 +1,31 @@ +execute_code: True + +dataset: + data_path: YOUR_DATA_DIR/covr + dataset_name: COVR + split: val + batch_size: 1 + paraphrased: True + +load_models: + maskrcnn: True + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + gpt3_general: True + gpt3_guess: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +gpt3: # emmm, davinci is discontinued + model: chatgpt + +use_cached_codex: True +cached_codex_path: YOUR_PATH +fixed_code_file: ./prompts/fixed_code/blip2_covr.prompt \ No newline at end of file diff --git a/viper/configs/execute/gqa.yaml b/viper/configs/execute/gqa.yaml new file mode 100644 index 0000000..6035197 --- /dev/null +++ b/viper/configs/execute/gqa.yaml @@ -0,0 +1,32 @@ +execute_code: True + +dataset: + data_path: YOUR_DATA_DIR/gqa + dataset_name: GQA + split: testdev + testing: False + 
batch_size: 1 + start_sample: 0 + +load_models: + maskrcnn: True + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + gpt3_general: True + gpt3_guess: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +gpt3: # emmm, davinci is discontinued + model: chatgpt + +use_cached_codex: True +cached_codex_path: YOUR_PATH +fixed_code_file: ./prompts/fixed_code/blip2.prompt \ No newline at end of file diff --git a/viper/configs/execute/nlvr.yaml b/viper/configs/execute/nlvr.yaml new file mode 100644 index 0000000..06b9ac6 --- /dev/null +++ b/viper/configs/execute/nlvr.yaml @@ -0,0 +1,30 @@ +execute_code: True + +dataset: + data_path: YOUR_DATA_DIR/nlvr2 + dataset_name: NLVR + split: test1 + batch_size: 1 + +load_models: + maskrcnn: True + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + gpt3_general: True + gpt3_guess: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +gpt3: # emmm, davinci is discontinued + model: chatgpt + +use_cached_codex: True +cached_codex_path: YOUR_PATH +fixed_code_file: './prompts/fixed_code/blip2_nlvr.prompt' \ No newline at end of file diff --git a/viper/configs/execute/refcoco+.yaml b/viper/configs/execute/refcoco+.yaml new file mode 100644 index 0000000..7e332fc --- /dev/null +++ b/viper/configs/execute/refcoco+.yaml @@ -0,0 +1,32 @@ +execute_code: True + +dataset: + data_path: YOUR_DATA_DIR/refer + dataset_name: RefCOCO + split_by: unc + split: testA + version: refcoco+ + batch_size: 1 + +load_models: + maskrcnn: True + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + gpt3_general: True + gpt3_guess: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +gpt3: # emmm, davinci is discontinued + model: chatgpt + +use_cached_codex: True +cached_codex_path: YOUR_PATH +fixed_code_file: ./prompts/fixed_code/glip.prompt \ No newline at end of file 
diff --git a/viper/configs/execute/refcoco.yaml b/viper/configs/execute/refcoco.yaml new file mode 100644 index 0000000..89a9898 --- /dev/null +++ b/viper/configs/execute/refcoco.yaml @@ -0,0 +1,32 @@ +execute_code: True + +dataset: + data_path: YOUR_DATA_DIR/refer + dataset_name: RefCOCO + split_by: unc + split: testA + version: refcoco + batch_size: 1 + +load_models: + maskrcnn: True + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + gpt3_general: True + gpt3_guess: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +gpt3: # emmm, davinci is discontinued + model: chatgpt + +use_cached_codex: True +cached_codex_path: YOUR_PATH +fixed_code_file: ./prompts/fixed_code/glip.prompt \ No newline at end of file diff --git a/viper/configs/execute/refcocog.yaml b/viper/configs/execute/refcocog.yaml new file mode 100644 index 0000000..2f37677 --- /dev/null +++ b/viper/configs/execute/refcocog.yaml @@ -0,0 +1,32 @@ +execute_code: True + +dataset: + data_path: YOUR_DATA_DIR/refer + dataset_name: RefCOCO + split_by: umd + split: test + version: refcocog + batch_size: 1 + +load_models: + maskrcnn: True + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + gpt3_general: True + gpt3_guess: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +gpt3: # emmm, davinci is discontinued + model: chatgpt + +use_cached_codex: True +cached_codex_path: YOUR_PATH +fixed_code_file: ./prompts/fixed_code/glip.prompt \ No newline at end of file diff --git a/viper/configs/execute/rsvg.yaml b/viper/configs/execute/rsvg.yaml new file mode 100644 index 0000000..08bddc5 --- /dev/null +++ b/viper/configs/execute/rsvg.yaml @@ -0,0 +1,30 @@ +execute_code: True + +dataset: + data_path: YOUR_DATA_DIR/rsvg + dataset_name: RSVG + split: test + batch_size: 1 + +load_models: + maskrcnn: True + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + 
gpt3_general: True + gpt3_guess: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +gpt3: # emmm, davinci is discontinued + model: chatgpt + +use_cached_codex: True +cached_codex_path: YOUR_PATH +fixed_code_file: ./prompts/fixed_code/glip2.prompt \ No newline at end of file diff --git a/viper/configs/execute/tallyqa.yaml b/viper/configs/execute/tallyqa.yaml new file mode 100644 index 0000000..f89ec9e --- /dev/null +++ b/viper/configs/execute/tallyqa.yaml @@ -0,0 +1,31 @@ +execute_code: True + +dataset: + data_path: YOUR_DATA_DIR/tallyqa + dataset_name: TallyQA + split: test + is_simple: False + batch_size: 1 + +load_models: + maskrcnn: True + clip: False + glip: True + owlvit: False + tcl: False + gpt3_qa: True + gpt3_general: True + gpt3_guess: True + depth: True + blip: True + saliency: False + xvlm: True + codex: True + codellama: False + +gpt3: # emmm, davinci is discontinued + model: chatgpt + +use_cached_codex: True +cached_codex_path: YOUR_PATH +fixed_code_file: ./prompts/fixed_code/blip2.prompt \ No newline at end of file diff --git a/viper/configs/generate/covr.yaml b/viper/configs/generate/covr.yaml new file mode 100644 index 0000000..b72a81e --- /dev/null +++ b/viper/configs/generate/covr.yaml @@ -0,0 +1,34 @@ +dataset: + data_path: YOUR_DATA_DIR/covr + dataset_name: COVR + split: val + batch_size: 12800 + paraphrased: True + +results_dir: ./results/joint/covr/ + +load_models: + maskrcnn: False + clip: False + glip: False + owlvit: False + tcl: False + gpt3_list: False + gpt3_qa: False + gpt3_guess: False + depth: False + blip: False + saliency: False + xvlm: False + codellama: True + +codex: + prompt: ./prompts/benchmarks/joint.py + model: codellama + codellama_model_name: codellama/CodeLlama-7b-Python-hf + max_new_tokens: 256 + max_batch_size: 12800 + use_tqdm: True + overgenerate: False + do_sample: False + temperature: 0. 
\ No newline at end of file diff --git a/viper/configs/generate/gqa.yaml b/viper/configs/generate/gqa.yaml new file mode 100644 index 0000000..ffdeb06 --- /dev/null +++ b/viper/configs/generate/gqa.yaml @@ -0,0 +1,35 @@ +dataset: + data_path: YOUR_DATA_DIR/gqa + dataset_name: GQA + split: testdev + testing: False + batch_size: 15000 + start_sample: 0 + +results_dir : ./results/joint/gqa/ + +load_models: + maskrcnn: False + clip: False + glip: False + owlvit: False + tcl: False + gpt3_list: False + gpt3_qa: False + gpt3_guess: False + depth: False + blip: False + saliency: False + xvlm: False + codellama: True + +codex: + prompt: ./prompts/benchmarks/joint.py + model: codellama + codellama_model_name: codellama/CodeLlama-7b-Python-hf + max_new_tokens: 256 + max_batch_size: 15000 + overgenerate: False + do_sample: False + temperature: 0. + use_tqdm: True \ No newline at end of file diff --git a/viper/configs/generate/nlvr.yaml b/viper/configs/generate/nlvr.yaml new file mode 100644 index 0000000..2987673 --- /dev/null +++ b/viper/configs/generate/nlvr.yaml @@ -0,0 +1,33 @@ +dataset: + data_path: YOUR_DATA_DIR/nlvr2 + dataset_name: NLVR + split: test1 + batch_size: 12800 + +results_dir: ./results/joint2/nlvr/ + +load_models: + maskrcnn: False + clip: False + glip: False + owlvit: False + tcl: False + gpt3_list: False + gpt3_qa: False + gpt3_guess: False + depth: False + blip: False + saliency: False + xvlm: False + codellama: True + +codex: + prompt: ./prompts/benchmarks/nlvr.py + model: codellama + codellama_model_name: codellama/CodeLlama-7b-Python-hf + max_new_tokens: 256 + max_batch_size: 400000 + overgenerate: False + use_tqdm: True + do_sample: False + temperature: 0. 
\ No newline at end of file diff --git a/viper/configs/generate/refcoco+.yaml b/viper/configs/generate/refcoco+.yaml new file mode 100644 index 0000000..f14d2be --- /dev/null +++ b/viper/configs/generate/refcoco+.yaml @@ -0,0 +1,35 @@ +dataset: + data_path: YOUR_DATA_DIR/refer + dataset_name: RefCOCO + split_by: unc + split: testA + version: refcoco+ + batch_size: 12800 + +results_dir: ./results/joint/refcocop/ + +load_models: + maskrcnn: False + clip: False + glip: False + owlvit: False + tcl: False + gpt3_list: False + gpt3_qa: False + gpt3_guess: False + depth: False + blip: False + saliency: False + xvlm: False + codellama: True + +codex: + prompt: ./prompts/benchmarks/joint.py + model: codellama + codellama_model_name: codellama/CodeLlama-7b-Python-hf + max_new_tokens: 256 + max_batch_size: 12800 + use_tqdm: True + overgenerate: False + do_sample: False + temperature: 0. \ No newline at end of file diff --git a/viper/configs/generate/refcoco.yaml b/viper/configs/generate/refcoco.yaml new file mode 100644 index 0000000..921a8c4 --- /dev/null +++ b/viper/configs/generate/refcoco.yaml @@ -0,0 +1,35 @@ +dataset: + data_path: YOUR_DATA_DIR/refer + dataset_name: RefCOCO + split_by: unc + split: testA + version: refcoco + batch_size: 12800 + +results_dir: ./results/joint/refcoco/ + +load_models: + maskrcnn: False + clip: False + glip: False + owlvit: False + tcl: False + gpt3_list: False + gpt3_qa: False + gpt3_guess: False + depth: False + blip: False + saliency: False + xvlm: False + codellama: True + +codex: + prompt: ./prompts/benchmarks/joint.py + model: codellama + codellama_model_name: codellama/CodeLlama-7b-Python-hf + max_new_tokens: 256 + max_batch_size: 12800 + use_tqdm: True + overgenerate: False + do_sample: False + temperature: 0. 
\ No newline at end of file diff --git a/viper/configs/generate/refcocog.yaml b/viper/configs/generate/refcocog.yaml new file mode 100644 index 0000000..2966230 --- /dev/null +++ b/viper/configs/generate/refcocog.yaml @@ -0,0 +1,35 @@ +dataset: + data_path: YOUR_DATA_DIR/refer + dataset_name: RefCOCO + split_by: umd + split: test + version: refcocog + batch_size: 12800 + +results_dir: ./results/joint/refcocog/ + +load_models: + maskrcnn: False + clip: False + glip: False + owlvit: False + tcl: False + gpt3_list: False + gpt3_qa: False + gpt3_guess: False + depth: False + blip: False + saliency: False + xvlm: False + codellama: True + +codex: + prompt: ./prompts/benchmarks/joint.py + model: codellama + codellama_model_name: codellama/CodeLlama-13b-Python-hf + max_new_tokens: 256 + max_batch_size: 12800 + overgenerate: False + do_sample: False + temperature: 0. + use_tqdm: True \ No newline at end of file diff --git a/viper/configs/generate/rsvg.yaml b/viper/configs/generate/rsvg.yaml new file mode 100644 index 0000000..bff10ef --- /dev/null +++ b/viper/configs/generate/rsvg.yaml @@ -0,0 +1,33 @@ +dataset: + data_path: YOUR_DATA_DIR/rsvg + dataset_name: RSVG + split: test + batch_size: 1000 + +results_dir: ./results/joint/refcocog/ + +load_models: + maskrcnn: False + clip: False + glip: False + owlvit: False + tcl: False + gpt3_list: False + gpt3_qa: False + gpt3_guess: False + depth: False + blip: False + saliency: False + xvlm: False + codellama: True + +codex: + prompt: ./prompts/benchmarks/joint.py + model: codellama + codellama_model_name: codellama/CodeLlama-7b-Python-hf + max_new_tokens: 256 + max_batch_size: 12800 + overgenerate: False + do_sample: False + temperature: 0. 
"""
Data loaders
Adapted in part from https://github.com/phiyodr/vqaloader/blob/master/vqaloader/loaders.py
"""
import copy

import torch


def get_dataset(config_dataset, load_image: bool, orig_query: bool = False):
    """Instantiate the dataset class named by ``config_dataset.dataset_name``.

    Args:
        config_dataset: config mapping; must expose ``dataset_name`` and is
            forwarded wholesale to the dataset constructor.
        load_image: whether the dataset should load/transform images.
        orig_query: keep the raw query text instead of wrapping it in a
            ``def execute_command(...)`` prompt. An ``orig_query`` entry in the
            config overrides this argument.

    Returns:
        A torch ``Dataset`` instance.

    Raises:
        ValueError: if ``dataset_name`` is not recognized.
    """
    # Deferred so that importing this module does not require torchvision.
    from torchvision import transforms

    dataset_name = config_dataset.dataset_name
    if 'orig_query' in config_dataset:
        # Copy before popping so the caller's config is left untouched.
        config_dataset = copy.deepcopy(config_dataset)
        orig_query = config_dataset.pop('orig_query')

    # Keyword arguments shared by every dataset constructor.
    common = dict(image_transforms=transforms.Compose([transforms.ToTensor()]),
                  load_image=load_image, orig_query=orig_query)

    if dataset_name == 'RefCOCO':
        from datasets.refcoco import RefCOCODataset
        return RefCOCODataset(**config_dataset, **common)
    elif dataset_name == 'RSVG':
        from datasets.rsvg import RSVGDataset
        return RSVGDataset(**config_dataset, **common)
    elif dataset_name == 'MultiVersionRefCOCO':
        from datasets.refcoco import MultiVersionRefCOCODataset
        return MultiVersionRefCOCODataset(**config_dataset, **common)
    elif dataset_name == 'GQA':
        from datasets.gqa import GQADataset
        return GQADataset(**config_dataset, balanced=True, **common)
    elif dataset_name == 'TallyQA':
        from datasets.tallyqa import TallyQADataset
        return TallyQADataset(**config_dataset, **common)
    elif dataset_name == 'VSR':
        from datasets.vsr import VSRDataset
        return VSRDataset(**config_dataset, **common)
    elif dataset_name == 'COVR':
        from datasets.covr import COVRDataset
        return COVRDataset(**config_dataset, **common)
    elif dataset_name == 'NLVR':
        from datasets.nlvr import NLVRDataset
        return NLVRDataset(**config_dataset, **common)
    else:
        raise ValueError(f"Unknown dataset {dataset_name}")


def general_postprocessing(prediction):
    """Normalize a raw program/model output into a lowercase answer string.

    Handles ImagePatch results, lists (first element; empty list -> "no"),
    tensors, booleans ("yes"/"no") and ints, then lowercases and strips
    whitespace. 'true'/'false' strings are mapped to 'yes'/'no'.
    """
    try:
        if type(prediction).__name__ == 'ImagePatch':
            prediction = prediction.classify_object()

        if isinstance(prediction, list):
            prediction = prediction[0] if len(prediction) > 0 else "no"

        if isinstance(prediction, torch.Tensor):
            prediction = prediction.item()
        if isinstance(prediction, bool):
            prediction = "yes" if prediction else "no"
        elif isinstance(prediction, int):
            prediction = str(prediction)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # not swallowed; any conversion failure falls back to str().
        prediction = str(prediction)

    prediction = str(prediction)

    prediction = prediction.replace('\n', ' ')
    prediction = prediction.replace('\t', ' ')
    prediction = prediction.strip()
    prediction = prediction.lower()

    if prediction == 'true':
        prediction = 'yes'
    elif prediction == 'false':
        prediction = 'no'
    return prediction


def accuracy(prediction, ground_truth, *args):
    """Exact-match accuracy over samples with a non-empty ground truth.

    Args:
        prediction (list): List of predicted answers.
        ground_truth (list): List of ground truth answers.
    Returns:
        score (float): Fraction of post-processed predictions equal to their
            ground truth; 0 if there are no predictions or no non-empty
            ground truths.
    """
    if len(prediction) == 0:  # if no prediction, return 0
        return 0
    assert len(prediction) == len(ground_truth)
    pred_gt_filtered = [(pred, gt) for pred, gt in zip(prediction, ground_truth) if gt != '']
    if not pred_gt_filtered:
        # BUG FIX: previously divided by len(pred_gt_filtered) unguarded,
        # raising ZeroDivisionError when every ground truth was ''.
        return 0
    score = 0
    for p, g in pred_gt_filtered:
        if general_postprocessing(p) == g:
            score += 1
    return score / len(pred_gt_filtered)
orig_query + + with open(os.path.join(data_path, f"{split}.jsonl")) as f: + self.samples = [json.loads(line) for line in f] + + if max_samples is not None: + np.random.seed(4) + np.random.shuffle(self.samples) + self.samples = self.samples[:max_samples] + if end_sample is not None: + self.samples = self.samples[:end_sample] + if start_sample is not None: + self.samples = self.samples[start_sample:] + print("Length:", len(self.samples)) + + # For evaluation + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", + "couldnt": "couldn't", "couldn'tve": "couldn't've", "couldnt've": "couldn't've", + "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", + "hed": "he'd", "hed've": "he'd've", "he'dve": "he'd've", "hes": "he's", "howd": "how'd", + "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", "Im": "I'm", + "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", + "itll": "it'll", "let's": "let's", "maam": "ma'am", "mightnt": "mightn't", + "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", + "oclock": "o'clock", "oughtnt": "oughtn't", "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", + "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", + "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", "somebody'd": "somebodyd", + "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", + "somebodyll": "somebody'll", "somebodys": "somebody's", "someoned": "someone'd", + "someoned've": "someone'd've", "someone'dve": "someone'd've", "someonell": "someone'll", + "someones": "someone's", "somethingd": "something'd", "somethingd've": 
"something'd've", + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", + "thered": "there'd", "thered've": "there'd've", "there'dve": "there'd've", + "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", + "twas": "'twas", "wasnt": "wasn't", "wed've": "we'd've", "we'dve": "we'd've", + "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", + "wheres": "where's", "whereve": "where've", "whod": "who'd", "whod've": "who'd've", + "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", + "whyll": "why'll", "whyre": "why're", "whys": "why's", "wont": "won't", + "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", + "yall'd've": "y'all'd've", "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", + "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", "youll": "you'll", + "youre": "you're", "youve": "you've"} + self.manualMap = {'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + def __getitem__(self, index): + sample = self.samples[index] + + text = sample['paraphrased'] if self.paraphrased else sample['utterance'] + if not text.endswith("?"): + text = "Is the statement true? 
" + text + if not self.orig_query: + text = f"Given a list of images: {text}\ndef execute_command(image_list) -> str:" + + img_list = [] + for img in sample['scenes']: + with open(self.get_image_path(img), "rb") as f: + img = Image.open(f).convert("RGB") + if self.image_transforms: + img = self.image_transforms(img) + img_list.append(img) + answer = sample['answer'] + + return {'question': text, 'img': img_list, 'sample_id': index, 'answer': answer, 'index': index, + 'possible_answers': [], 'info_to_prompt': text, "question_type": -1, 'extra_context': ''} + + def get_image_path(self, img): + if img[0] in string.ascii_letters: + return os.path.join(self.data_path, 'imSitu_images', img.split("_")[0], f'{img}.jpg') + else: + return os.path.join(self.data_path, 'gqa_images', f'{img}.jpg') + + def __len__(self): + return len(self.samples) + + def accuracy(self, prediction, ground_truth, *args, **kwargs): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. 
+ """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p) == self.post_process_gt(g): + score += 1 + return score / len(prediction) + + def post_process_gt(self, x): + if x == 'False' or x == 'True': + x = eval(x) + if isinstance(x, bool): + if x: + return 'yes' + else: + return 'no' + return str(x) + + def post_process(self, prediction, stem=True): + """ + Code from https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py, + as indicated here https://okvqa.allenai.org/leaderboard.html + :return: + """ + if prediction is None: + return None + + prediction = general_postprocessing(prediction) + + prediction = prediction.replace('\n', ' ') + prediction = prediction.replace('\t', ' ') + prediction = prediction.strip() + prediction = self.processPunctuation(prediction) + prediction = self.processDigitArticle(prediction) + return prediction + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", outText, re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText diff --git a/viper/datasets/gqa.py b/viper/datasets/gqa.py new file mode 100644 index 0000000..7ea1a03 --- /dev/null +++ b/viper/datasets/gqa.py @@ -0,0 +1,295 @@ +import os +import re +import time + +import numpy as np +import pandas 
as pd +from PIL import Image +from torch.utils.data import Dataset + +from datasets import general_postprocessing + + +class GQADataset(Dataset): + BALANCED_TYPE = { + True: "balanced", + False: "all" + } + + def __init__(self, split, balanced=True, data_path="", image_transforms=None, question_transforms=None, + tokenize=None, verbose=False, testing=False, max_samples=None, first_n=None, return_pil=True, + extra_context=None, sample_n=None, start_sample=None, load_image=False, orig_query=False, **kwargs): + """ + Args: + split (str): Data split. One of ["challenge", "submission", "test", "testdev", "train", "val"] + balanced (bool): You balanced version or full version. + image_transforms: + question_transforms: + tokenize (fct): + verbose (bool): Print some infos. Default=True + testing (bool): Set to true for data splits without targets. Default=False. + first_n (int): Only use the first n samples. Default=None. Only valid if loading from hdf. + """ + print("Unused config params:", kwargs.keys()) + + start_time = time.time() + self.split = split + self.testing = testing + # assert split in ["challenge", "submission", "test", "testdev", "testdev-1000subset", "train", "val"] + self.balanced = balanced + self.balanced_type = self.BALANCED_TYPE[balanced] + self.data_path = data_path + self.image_transforms = image_transforms + self.question_transforms = question_transforms + self.tokenize = tokenize + self.input_type = 'image' + self.output_type = 'str' + self.load_image = load_image + self.orig_query = orig_query + + self.return_pil = return_pil + + if not balanced and split == "train": + raise NotImplementedError + else: + # check path to cached df exists + if self.split == 'train' and self.balanced_type == 'balanced' and os.path.exists( + os.path.join(data_path, f"questions/{self.split}_{self.balanced_type}_questions.h5")): + if verbose: + print(f"Loading GQA Dataset from {data_path}", flush=True) + self.df = pd.read_hdf( + os.path.join(data_path, 
f"questions/{self.split}_{self.balanced_type}_questions.h5"), "table", + stop=first_n) + else: + self.file_name = f"questions/{self.split}_{self.balanced_type}_questions.json" + path = os.path.expanduser(os.path.join(data_path, self.file_name)) + if verbose: + print(f"Loading GQA Dataset from {path}", flush=True) + self.df = pd.read_json(path, orient="index") + + if sample_n is not None: + self.df = self.df.sample(n=sample_n) + if max_samples is not None: + if sample_n is not None: + print("Warning: sample first, then select max") + self.df = self.df[:max_samples] + if start_sample is not None: + self.df = self.df[start_sample:] + print("Data frame shape:", self.df.shape) + + if extra_context is not None: + if 'code_for_correction' in extra_context: + df_for_correction = pd.read_csv(extra_context['code_for_correction']) + if len(df_for_correction) > len(self.df): + print("Emmmmm, your provided code df has %d rows. It's larger than %d. Will truncate" % ( + len(df_for_correction), len(self.df))) + df_for_correction = df_for_correction[:len(self.df)] + + extra_context_list = ["def execute_command(image) -> str:" + x for x in + df_for_correction['code'].tolist()] + + if extra_context.get("trace", False): + for i in range(len(extra_context_list)): + extra_context[i] = '{}\n\n--- Trace\n\n{}'.format( + extra_context_list[i], df_for_correction['traced'].iloc[i] + ) + + self.df.insert(len(self.df.columns), 'extra_context', extra_context_list) + else: + raise KeyError + + self.n_samples = self.df.shape[0] + if verbose: + print( + f"Loading GQA Dataset done in {time.time() - start_time:.1f} seconds. 
Loaded {self.n_samples} samples.") + + # For evaluation + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", + "couldnt": "couldn't", "couldn'tve": "couldn't've", "couldnt've": "couldn't've", + "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", + "hed": "he'd", "hed've": "he'd've", "he'dve": "he'd've", "hes": "he's", "howd": "how'd", + "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", "Im": "I'm", + "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", + "itll": "it'll", "let's": "let's", "maam": "ma'am", "mightnt": "mightn't", + "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", + "oclock": "o'clock", "oughtnt": "oughtn't", "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", + "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", + "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", "somebody'd": "somebodyd", + "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", + "somebodyll": "somebody'll", "somebodys": "somebody's", "someoned": "someone'd", + "someoned've": "someone'd've", "someone'dve": "someone'd've", "someonell": "someone'll", + "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", + "thered": "there'd", "thered've": "there'd've", "there'dve": "there'd've", + "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", + "twas": "'twas", "wasnt": "wasn't", "wed've": 
"we'd've", "we'dve": "we'd've", + "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", + "wheres": "where's", "whereve": "where've", "whod": "who'd", "whod've": "who'd've", + "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", + "whyll": "why'll", "whyre": "why're", "whys": "why's", "wont": "won't", + "wouldve": "would've", "wouldnt": "wouldn't", "wouldnt've": "wouldn't've", + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", + "yall'd've": "y'all'd've", "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", + "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", "youll": "you'll", + "youre": "you're", "youve": "you've"} + self.manualMap = {'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + self.max_words = 50 + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", outText, re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] 
+ outText = ' '.join(outText) + return outText + + def get_img_path(self, index): + if "imageId" in self.df.columns: + image_id = self.df.iloc[index]["imageId"] + else: + image_id = self.df.iloc[index]["image_id"] + return os.path.expanduser(os.path.join(self.data_path, "../images", f"{image_id}.jpg")) + + def get_index_from_sample_id(self, sample_id): + return np.where(self.df.index == sample_id)[0][0].item() + + def __getitem__(self, index): + # image input + sample_id = self.df.iloc[index].name + if "imageId" in self.df.columns: + image_id = self.df.iloc[index]["imageId"] + else: + image_id = self.df.iloc[index]["image_id"] + question = self.df.iloc[index]["question"] + + if self.orig_query: + question = question + else: + question = f"Given an image: {question}\ndef execute_command(image) -> str:" + + extra_context = self.df['extra_context'].iloc[index] if 'extra_context' in self.df else None + + question_type = -1 + answer = -1 + if not self.testing: + answer = self.df.iloc[index]["answer"] + question_type = self.df.iloc[index]["groups"]["global"] + if question_type is None: + question_type = -1 # can't have None for DataLoader + + img = pil_img = None + # Load and transform image + if self.load_image: + image_path = os.path.expanduser(os.path.join(self.data_path, "images", f"{image_id}.jpg")) + with open(image_path, "rb") as f: + pil_img = Image.open(f).convert("RGB") + if self.image_transforms: + img = self.image_transforms(pil_img) + else: + img = pil_img + + # Load, transform and tokenize question + if self.question_transforms: + question = self.question_transforms(question) + if self.tokenize: + question = self.tokenize(question) + + # Return + if self.testing: + if (sample_id is None) or (img is None) or (question is None): + raise Exception(f"Error in GQA Dataset: sample_id={sample_id}, img={img}, question={question}") + out_dict = {"sample_id": sample_id, "img": img, "question": question, 'index': index} + if self.return_pil: + out_dict["pil_img"] = 
pil_img + else: + out_dict = {"sample_id": sample_id, "answer": answer, "img": img, "question": question, 'pil_img': pil_img, + "question_type": question_type, 'index': index, 'possible_answers': [], + 'info_to_prompt': question, } + + if extra_context is not None: + out_dict['extra_context'] = extra_context + return out_dict + + def post_process(self, prediction, stem=True): + """ + Code from https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py, + as indicated here https://okvqa.allenai.org/leaderboard.html + :return: + """ + if prediction is None: + return None + + prediction = general_postprocessing(prediction) + + prediction = prediction.replace('\n', ' ') + prediction = prediction.replace('\t', ' ') + prediction = prediction.strip() + prediction = self.processPunctuation(prediction) + prediction = self.processDigitArticle(prediction) + return prediction + + def accuracy(self, prediction, ground_truth, *args, **kwargs): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. 
+ """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p) == g: + score += 1 + return score / len(prediction) + + # we can call len(dataset) to return the size + def __len__(self): + return self.n_samples diff --git a/viper/datasets/nlvr.py b/viper/datasets/nlvr.py new file mode 100644 index 0000000..3acd487 --- /dev/null +++ b/viper/datasets/nlvr.py @@ -0,0 +1,182 @@ +import json +import os +import re + +import numpy as np +from PIL import Image +from datasets import general_postprocessing +from torch.utils.data import Dataset + + +class NLVRDataset(Dataset): + def __init__(self, split, data_path="", image_transforms=None, max_samples=None, start_sample=None, + orig_query=False, **kwargs): + self.split = split + self.data_path = data_path + self.image_transforms = image_transforms + self.max_samples = max_samples + self.input_type = 'image_dict' + self.output_type = 'str' + self.orig_query = orig_query + + if split == "dev": + split = "balanced_dev" + elif split == "test1": + split = "balanced_test1" + elif split != "train": + raise NotImplementedError + # update here + with open(os.path.join(data_path, f"{split}.jsonl")) as f: + self.samples = [json.loads(line) for line in f] + + if max_samples is not None: + np.random.seed(4) + np.random.shuffle(self.samples) + self.samples = self.samples[:max_samples] + if start_sample is not None: + self.samples = self.samples[start_sample:] + + # For evaluation + self.contractions = {"aint": "ain't", "arent": "aren't", "cant": "can't", "couldve": "could've", + "couldnt": "couldn't", "couldn'tve": "couldn't've", "couldnt've": "couldn't've", + "didnt": "didn't", "doesnt": "doesn't", "dont": "don't", "hadnt": "hadn't", + "hadnt've": "hadn't've", "hadn'tve": "hadn't've", "hasnt": "hasn't", "havent": "haven't", + "hed": "he'd", "hed've": "he'd've", "he'dve": "he'd've", "hes": "he's", 
"howd": "how'd", + "howll": "how'll", "hows": "how's", "Id've": "I'd've", "I'dve": "I'd've", "Im": "I'm", + "Ive": "I've", "isnt": "isn't", "itd": "it'd", "itd've": "it'd've", "it'dve": "it'd've", + "itll": "it'll", "let's": "let's", "maam": "ma'am", "mightnt": "mightn't", + "mightnt've": "mightn't've", "mightn'tve": "mightn't've", "mightve": "might've", + "mustnt": "mustn't", "mustve": "must've", "neednt": "needn't", "notve": "not've", + "oclock": "o'clock", "oughtnt": "oughtn't", "ow's'at": "'ow's'at", "'ows'at": "'ow's'at", + "'ow'sat": "'ow's'at", "shant": "shan't", "shed've": "she'd've", "she'dve": "she'd've", + "she's": "she's", "shouldve": "should've", "shouldnt": "shouldn't", + "shouldnt've": "shouldn't've", "shouldn'tve": "shouldn't've", "somebody'd": "somebodyd", + "somebodyd've": "somebody'd've", "somebody'dve": "somebody'd've", + "somebodyll": "somebody'll", "somebodys": "somebody's", "someoned": "someone'd", + "someoned've": "someone'd've", "someone'dve": "someone'd've", "someonell": "someone'll", + "someones": "someone's", "somethingd": "something'd", "somethingd've": "something'd've", + "something'dve": "something'd've", "somethingll": "something'll", "thats": "that's", + "thered": "there'd", "thered've": "there'd've", "there'dve": "there'd've", + "therere": "there're", "theres": "there's", "theyd": "they'd", "theyd've": "they'd've", + "they'dve": "they'd've", "theyll": "they'll", "theyre": "they're", "theyve": "they've", + "twas": "'twas", "wasnt": "wasn't", "wed've": "we'd've", "we'dve": "we'd've", + "weve": "we've", "werent": "weren't", "whatll": "what'll", "whatre": "what're", + "whats": "what's", "whatve": "what've", "whens": "when's", "whered": "where'd", + "wheres": "where's", "whereve": "where've", "whod": "who'd", "whod've": "who'd've", + "who'dve": "who'd've", "wholl": "who'll", "whos": "who's", "whove": "who've", + "whyll": "why'll", "whyre": "why're", "whys": "why's", "wont": "won't", + "wouldve": "would've", "wouldnt": "wouldn't", 
"wouldnt've": "wouldn't've", + "wouldn'tve": "wouldn't've", "yall": "y'all", "yall'll": "y'all'll", "y'allll": "y'all'll", + "yall'd've": "y'all'd've", "y'alld've": "y'all'd've", "y'all'dve": "y'all'd've", + "youd": "you'd", "youd've": "you'd've", "you'dve": "you'd've", "youll": "you'll", + "youre": "you're", "youve": "you've"} + self.manualMap = {'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10' + } + self.articles = ['a', + 'an', + 'the' + ] + + self.periodStrip = re.compile("(?!<=\d)(\.)(?!\d)") + self.commaStrip = re.compile("(\d)(\,)(\d)") + self.punct = [';', r"/", '[', ']', '"', '{', '}', + '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!'] + + def __getitem__(self, index): + sample = self.samples[index] + text = sample['sentence'] + if not text.endswith("?"): + text = "Is the statement true? " + text + if not self.orig_query: + text = f"Given two images, one on the left and one on the right: {text}\ndef execute_command(image_dict) -> str:" + img_list = [] + img_f_base = os.path.join(self.data_path, self.split) + if self.split == "train": + dir = sample["directory"] + img_f_base = os.path.join(img_f_base, str(dir)) + sample_base = sample["identifier"][:-1] + "img" + for i in ["0", "1"]: + with open(os.path.join(img_f_base, sample_base + i + ".png"), "rb") as f: + img = Image.open(f).convert("RGB") + if self.image_transforms: + img = self.image_transforms(img) + img_list.append(img) + answer = 'yes' if sample['label'] == "True" else 'no' + return {'question': text, 'img': {'left': img_list[0], 'right': img_list[1]}, + 'sample_id': index, 'answer': answer, 'index': index, 'possible_answers': [], 'info_to_prompt': text, + "question_type": -1, 'extra_context': ''} + + def __len__(self): + return len(self.samples) + + def accuracy(self, prediction, ground_truth, *args, **kwargs): + """ + Args: + prediction (list): 
List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. + """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p) == g: + score += 1 + return score / len(prediction) + + def post_process(self, prediction, stem=True): + """ + Code from https://github.com/GT-Vision-Lab/VQA/blob/master/PythonEvaluationTools/vqaEvaluation/vqaEval.py, + as indicated here https://okvqa.allenai.org/leaderboard.html + :return: + """ + if prediction is None: + return None + + prediction = general_postprocessing(prediction) + + prediction = prediction.replace('\n', ' ') + prediction = prediction.replace('\t', ' ') + prediction = prediction.strip() + prediction = self.processPunctuation(prediction) + prediction = self.processDigitArticle(prediction) + return prediction + + def processPunctuation(self, inText): + outText = inText + for p in self.punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search(self.commaStrip, inText) != None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = self.periodStrip.sub("", outText, re.UNICODE) + return outText + + def processDigitArticle(self, inText): + outText = [] + tempText = inText.lower().split() + for word in tempText: + word = self.manualMap.setdefault(word, word) + if word not in self.articles: + outText.append(word) + else: + pass + for wordId, word in enumerate(outText): + if word in self.contractions: + outText[wordId] = self.contractions[word] + outText = ' '.join(outText) + return outText diff --git a/viper/datasets/refcoco.py b/viper/datasets/refcoco.py new file mode 100644 index 0000000..1c88f6e --- /dev/null +++ b/viper/datasets/refcoco.py @@ -0,0 +1,220 @@ +import json +import os +import pickle + +import numpy as np +import torch +from PIL import Image +from 
torch.utils.data import Dataset +from torchvision.ops import box_iou + + +def load_samples(data_path, version, split, split_by): + assert version in ['refcoco', 'refcoco+', 'refcocog'] + + # load refs from data/dataset/refs(dataset).json + ref_file = os.path.join(data_path, version, 'refs(' + split_by + ').p') + with open(ref_file, 'rb') as f: + refs = pickle.load(f) + # refs index + refs_index = {} + for ref in refs: + refs_index[ref['ref_id']] = ref + + # load annotations from data/dataset/instances.json + instances_file = os.path.join(data_path, version, 'instances.json') + with open(instances_file, 'r') as f: + instances = json.load(f) + + # image index: just for computing heights, not really useful + img_index = {} + for img in instances['images']: + img_index[img['id']] = img + + # annotation index + annotations_index = {} + for ann in instances['annotations']: + annotations_index[ann['id']] = ann + height = img_index[ann['image_id']]['height'] # adjust coordinates + ann['bbox'] = [ann['bbox'][0], height - (ann['bbox'][1] + ann['bbox'][3]), ann['bbox'][2] + ann['bbox'][0], + height - ann['bbox'][1]] + + # ref to annotation + ref_to_ann = {} + for ref in refs: + ref_id = ref['ref_id'] + ann_id = ref['ann_id'] + ref_to_ann[ref_id] = annotations_index[ann_id] + + def get_sample(ref_id, sent_id): + ref = refs_index[ref_id] + return { + 'img_path': get_sample_path(ref=ref), + 'text': ref['sentences'][sent_id]['sent'], + 'answer': ref_to_ann[ref_id]['bbox'], + } + + def get_ref_ids(split): + if split in ['testA', 'testB', 'testC']: + split_refs = [ref for ref in refs if split[-1] in ref['split']] # we also consider testAB, testBC, ... + elif split in ['testAB', 'testBC', 'testAC']: + split_refs = [ref for ref in refs if ref['split'] == split] # rarely used I guess... 
        elif split == 'test':
            # Any test partition (testA / testB / test) qualifies.
            split_refs = [ref for ref in refs if 'test' in ref['split']]
        elif split == 'train' or split == 'val':
            split_refs = [ref for ref in refs if ref['split'] == split]
        else:
            raise KeyError(f'No split {split}')

        ref_ids = [ref['ref_id'] for ref in split_refs]
        return ref_ids

    def get_sample_path(index=None, ref=None):
        # Resolve the on-disk COCO image path for a ref; when `ref` is not given,
        # look it up through the (ref_id, sentence-index) pair stored in `samples`.
        if ref is None:
            assert index is not None
            ref_id, i = samples[index]
            ref = refs_index[ref_id]

        # `file_name` embeds an annotation id as its last '_'-separated part;
        # strip it to recover the actual image file name.
        file_name = '_'.join(ref['file_name'].split('_')[:-1]) + '.' + ref['file_name'].split('.')[-1]
        # COCO file names look like COCO_<split>_<id>.jpg; the middle token is the split dir.
        coco_split = file_name.split('_')[1]

        img_path = os.path.join(data_path, 'mscoco', coco_split, file_name)
        return img_path

    # One sample per (ref, sentence) pair.
    ref_ids = get_ref_ids(split=split)
    samples = []
    for ref_id in ref_ids:
        ref = refs_index[ref_id]
        for i in range(len(ref['sent_ids'])):
            samples.append(get_sample(ref_id, i))
    return samples


class RefCOCODataset(Dataset):
    """
    Referring-expression grounding dataset (RefCOCO / RefCOCO+ / RefCOCOg).
    Used code from https://github.com/lichengunc/refer/blob/master/refer.py
    """

    def __init__(self, split, data_path="", image_transforms=None, question_transforms=None, tokenize=None,
                 max_samples=None, version='refcoco', split_by='unc', orig_query=False, **kwargs):
        self.split = split
        self.data_path = data_path
        self.max_samples = max_samples
        self.image_transforms = image_transforms
        self.question_transforms = question_transforms
        self.tokenize = tokenize
        self.input_type = 'image'
        self.output_type = 'ImagePatch'
        self.orig_query = orig_query

        self.samples = load_samples(data_path, version, split, split_by)

        # Fixed seed so the subsample selected by max_samples is reproducible across runs.
        np.random.seed(4)
        np.random.shuffle(self.samples)

        if max_samples is not None:
            self.samples = self.samples[:max_samples]

    def __getitem__(self, index):
        item = self.samples[index]
        img_path = item['img_path']
        text = item['text']
        answer = item['answer']

        with open(img_path, "rb") as f:
            pil_img = Image.open(f).convert("RGB")
        if self.image_transforms:
            img = self.image_transforms(pil_img)
        else:
            img = pil_img

        # There are different texts associated to every image
        if self.orig_query:
            text = text
        else:
            # Wrap the expression as a code-generation prompt for the visual-program model.
            text = f"Given an image: Find {text}.\ndef execute_command(image) -> ImagePatch:"

        return {'question': text, 'img': img, 'sample_id': index, 'answer': answer, 'index': index,
                'possible_answers': [], 'info_to_prompt': text, "question_type": -1, 'extra_context': ''}

    def __len__(self):
        return len(self.samples)

    @classmethod
    def accuracy(cls, prediction, ground_truth, *args, strict=True):
        """
        Compute IoU score
        Args:
            prediction (list): List of predicted answers.
            ground_truth (list): List of ground truth answers.
        Returns:
            score (float): Score of the prediction. It is an IoU score
        """
        assert len(prediction) == len(ground_truth)
        num_samples = 0
        iou = 0
        acc = 0
        for p, g in zip(prediction, ground_truth):
            num_samples += 1

            try:
                try:
                    # Coerce the prediction into a (1, 4) box tensor; it may arrive as a
                    # list, a "( ... )" string, or an ImagePatch-like object with box attributes.
                    if type(p) == list:
                        p = torch.tensor(p)[None]
                        assert tuple(p.shape) == (1, 4)
                    elif type(p) == str:
                        p = torch.tensor([float(x) for x in p.split('(')[1].split(')')[0].split(',')])[None]
                    else:
                        p = torch.tensor([p.left, p.lower, p.right, p.upper])[None]
                except:
                    if not strict:
                        # Fall back to a dataset-average box instead of skipping the sample.
                        p = torch.tensor([50.9, 39.1, 493.5, 356.5])[None]  # Mean IoU is 22.64%
                    else:
                        continue

                if type(g) == str:
                    g = [float(x) for x in g.split('[')[1].split(']')[0].split(',')]
                g = torch.tensor([g[0], g[1], g[2], g[3]])[None]
                iou_ = box_iou(p, g).item()  # Expects (x1, y1, x2, y2) format. So (left, lower, right, upper)
                iou += iou_
                # Precision is counted at the IoU > 0.7 threshold.
                if iou_ > 0.7:
                    acc += 1

            except:
                pass

        return iou / max(num_samples, 1), acc / max(num_samples, 1)


class MultiVersionRefCOCODataset(RefCOCODataset):
    # Union of several RefCOCO versions/splits, de-duplicated, for joint evaluation.
    def __init__(self, data_path="", image_transforms=None, question_transforms=None, tokenize=None, max_samples=None,
                 start_sample=None, versions=0, **kwargs):
        self.data_path = data_path
        self.max_samples = max_samples
        self.image_transforms = image_transforms
        self.question_transforms = question_transforms
        self.tokenize = tokenize
        self.input_type = 'image'
        self.output_type = 'ImagePatch'

        self.version_infos = []
        self.samples = []
        # De-duplicate samples across versions by their JSON serialization.
        cache_set = set()
        for version_id in range(1, versions + 1):
            # Each version is configured via a kwarg named version_1, version_2, ...
            version_info = kwargs[f'version_{version_id}']
            self.version_infos.append(version_info)
            samples = load_samples(data_path, version_info['version'], version_info['split'], version_info['split_by'])
            for sample in samples:
                jsample = json.dumps(sample)
                if jsample not in cache_set:
                    cache_set.add(jsample)
                    self.samples.append(sample)

        # Same fixed seed as the parent class for reproducible subsampling.
        np.random.seed(4)
        np.random.shuffle(self.samples)

        if max_samples is not None:
            self.samples = self.samples[:max_samples]
        if start_sample is not None:
            self.samples = self.samples[start_sample:]
        print("Length:", len(self.samples))
diff --git a/viper/datasets/rsvg.py b/viper/datasets/rsvg.py
new file mode 100644
index 0000000..403c8a1
--- /dev/null
+++ b/viper/datasets/rsvg.py
import os
import xml.etree.ElementTree as ET

import numpy as np
import torch
from PIL import Image
from torch.utils.data import Dataset
from torchvision.ops import box_iou


def filelist(root, file_type):
    # Recursively collect all files under `root` with the given extension.
    return [os.path.join(directory_path, f) for directory_path, directory_name, files in os.walk(root) for f in files if
            f.endswith(file_type)]


class RSVGDataset(Dataset):
    # Remote-sensing visual grounding dataset (RSVG): one (image, box, expression) per object.
    def __init__(self, data_path, imsize=640, image_transforms=None, split='train', max_samples=None, orig_query=False,
load_image=False, **kwargs): + self.images = [] + self.images_path = os.path.join(data_path, 'JPEGImages') + self.anno_path = os.path.join(data_path, 'Annotations') + self.imsize = imsize + + self.split = split + self.max_samples = max_samples + self.image_transforms = image_transforms + self.input_type = 'image' + self.output_type = 'ImagePatch' + self.orig_query = orig_query + self.load_image = load_image + + with open(os.path.join(data_path, split + '.txt'), "r") as f: + file = f.readlines() + Index = [int(index.strip('\n')) for index in file] + count = 0 + annotations = filelist(self.anno_path, '.xml') + for anno_path in annotations: + root = ET.parse(anno_path).getroot() + for member in root.findall('object'): + if count in Index: + imageFile = str(self.images_path) + '/' + root.find("./filename").text + box = np.array([int(member[2][0].text), int(member[2][1].text), int(member[2][2].text), + int(member[2][3].text)], dtype=np.float32) + text = member[3].text + self.images.append((imageFile, box, text)) + count += 1 + + if self.max_samples is not None: + self.images = self.images[:self.max_samples] + + def __getitem__(self, index): + img_path, bbox, text = self.images[index] + + if self.load_image: + with open(img_path, "rb") as f: + pil_img = Image.open(f).convert("RGB") + _, height = pil_img.size + if self.image_transforms: + img = self.image_transforms(pil_img) + else: + img = pil_img + + bbox = [bbox[0], height - bbox[3], bbox[2], height - bbox[1], ] + + else: + img = None + bbox = [0, 0, 0, 0] + + # There are different texts associated to every image + if self.orig_query: + text = text.lower() + else: + text = f"Given an image: Find {text}.\ndef execute_command(image) -> ImagePatch:" + + return {'question': text, 'img': img, 'sample_id': index, 'answer': bbox, 'index': index, + 'possible_answers': [], 'info_to_prompt': text, "question_type": -1, 'extra_context': ''} + + def __len__(self): + return len(self.images) + + @classmethod + def accuracy(cls, 
prediction, ground_truth, *args, **kwargs): + """ + Compute IoU score + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. It is an IoU score + """ + assert len(prediction) == len(ground_truth) + num_samples = 0 + iou = 0 + acc = 0 + for p, g in zip(prediction, ground_truth): + num_samples += 1 + + try: + try: + if type(p) == list: + p = torch.tensor(p)[None] + assert tuple(p.shape) == (1, 4) + elif type(p) == str: + p = torch.tensor([float(x) for x in p.split('(')[1].split(')')[0].split(',')])[None] + else: + p = torch.tensor([p.left, p.lower, p.right, p.upper])[None] + except: + continue + + if type(g) == str: + g = [float(x) for x in g.split('[')[1].split(']')[0].split(',')] + g = torch.tensor([g[0], g[1], g[2], g[3]])[None] + iou_ = box_iou(p, g).item() # Expects (x1, y1, x2, y2) format. So (left, lower, right, upper) + iou += iou_ + if iou_ > 0.7: + acc += 1 + + except: + pass + + return iou / max(num_samples, 1), acc / max(num_samples, 1) diff --git a/viper/datasets/tallyqa.py b/viper/datasets/tallyqa.py new file mode 100644 index 0000000..59b275c --- /dev/null +++ b/viper/datasets/tallyqa.py @@ -0,0 +1,93 @@ +import json +import os + +import numpy as np +from PIL import Image +from torch.utils.data import Dataset +from word2number import w2n + + +class TallyQADataset(Dataset): + def __init__(self, split, data_path="", image_transforms=None, max_samples=None, is_simple=None, orig_query=True, + **kwargs): + print("Unused config params:", kwargs.keys()) + + self.split = split + self.data_path = data_path + self.image_transforms = image_transforms + self.input_type = 'image' + self.output_type = 'str' + self.orig_query = orig_query + + with open(os.path.join(self.data_path, self.split + '.json')) as f: + samples = json.load(f) + + if is_simple is None: + self.samples = samples + else: + if is_simple: + self.samples = [s for s in samples if s['issimple']] 
+ print("Select simple samples: %d out of %d" % (len(self.samples), len(samples))) + else: + self.samples = [s for s in samples if not s['issimple']] + print("Select complex samples: %d out of %d" % (len(self.samples), len(samples))) + + if max_samples is not None: + np.random.seed(4) + np.random.shuffle(self.samples) + self.samples = self.samples[:max_samples] + + def __getitem__(self, index): + item = self.samples[index] + + question = item['question'] + if not self.orig_query: + question = f"Given an image: {question}\ndef execute_command(image) -> str:" + + question_type = -1 + image_path = os.path.join(self.data_path, item['image']) + with open(image_path, "rb") as f: + pil_img = Image.open(f).convert("RGB") + if self.image_transforms: + img = self.image_transforms(pil_img) + else: + img = pil_img + + out_dict = {"sample_id": index, "answer": item['answer'], "img": img, "question": question, + 'pil_img': pil_img, "question_type": question_type, 'index': index, 'possible_answers': [], + 'info_to_prompt': question, } + return out_dict + + def post_process(self, prediction, strict): + prediction = str(prediction).strip() + try: + return int(prediction) + except: + try: + return w2n(prediction) + except: + if not strict: + return 0 + else: + return None + + def accuracy(self, prediction, ground_truth, *args, strict=True, **kwargs): + """ + Args: + prediction (list): List of predicted answers. + ground_truth (list): List of ground truth answers. + Returns: + score (float): Score of the prediction. 
+ """ + if len(prediction) == 0: # if no prediction, return 0 + return 0 + assert len(prediction) == len(ground_truth) + score = 0 + for p, g in zip(prediction, ground_truth): + if self.post_process(p, strict=strict) == g: + score += 1 + return score / len(prediction) + + # we can call len(dataset) to return the size + def __len__(self): + return len(self.samples) diff --git a/viper/download_models.sh b/viper/download_models.sh new file mode 100644 index 0000000..0150775 --- /dev/null +++ b/viper/download_models.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# change this to your preferred download location +PRETRAINED_MODELS_PATH=./pretrained_models + +# GLIP model +mkdir -p $PRETRAINED_MODELS_PATH/GLIP/checkpoints +mkdir -p $PRETRAINED_MODELS_PATH/GLIP/configs +wget -nc -P $PRETRAINED_MODELS_PATH/GLIP/checkpoints https://huggingface.co/GLIPModel/GLIP/resolve/main/glip_large_model.pth +wget -nc -P $PRETRAINED_MODELS_PATH/GLIP/configs https://raw.githubusercontent.com/microsoft/GLIP/main/configs/pretrain/glip_Swin_L.yaml + +# X-VLM model +mkdir -p $PRETRAINED_MODELS_PATH/xvlm +gdown "https://drive.google.com/u/0/uc?id=1bv6_pZOsXW53EhlwU0ZgSk03uzFI61pN" -O $PRETRAINED_MODELS_PATH/xvlm/retrieval_mscoco_checkpoint_9.pth + +# TCL model +mkdir -p $PRETRAINED_MODELS_PATH/TCL +gdown "https://drive.google.com/uc?id=1Cb1azBdcdbm0pRMFs-tupKxILTCXlB4O" -O $PRETRAINED_MODELS_PATH/TCL/TCL_4M.pth + +# InSPyReNet model +mkdir -p $PRETRAINED_MODELS_PATH/saliency_inspyrenet_plus_ultra +gdown "https://drive.google.com/uc?id=13oBl5MTVcWER3YU4fSxW3ATlVfueFQPY" -O $PRETRAINED_MODELS_PATH/saliency_inspyrenet_plus_ultra/latest.pth diff --git a/viper/image_patch.py b/viper/image_patch.py new file mode 100644 index 0000000..db59c13 --- /dev/null +++ b/viper/image_patch.py @@ -0,0 +1,510 @@ +from __future__ import annotations + +import numpy as np +import re +import torch +from PIL import Image +from dateutil import parser as dateparser +from rich.console import Console +from torchvision import 
transforms
from torchvision.ops import box_iou
from typing import Union, List
from word2number import w2n

from utils import show_single_image, load_json
from vision_processes import forward, config

console = Console(highlight=False)


class ImagePatch:
    """A Python class containing a crop of an image centered around a particular object, as well as relevant
    information.

    Coordinates use a bottom-left origin: `lower`/`upper` are measured from the bottom of
    the image (see __init__ and compute_depth, which index rows as shape[1] - upper).

    Attributes
    ----------
    cropped_image : array_like
        An array-like of the cropped image taken from the original image.
    left : int
        An int describing the position of the left border of the crop's bounding box in the original image.
    lower : int
        An int describing the position of the bottom border of the crop's bounding box in the original image.
    right : int
        An int describing the position of the right border of the crop's bounding box in the original image.
    upper : int
        An int describing the position of the top border of the crop's bounding box in the original image.

    Methods
    -------
    find(object_name: str)->List[ImagePatch]
        Returns a list of new ImagePatch objects containing crops of the image centered around any objects found in the
        image matching the object_name.
    exists(object_name: str)->bool
        Returns True if the object specified by object_name is found in the image, and False otherwise.
    verify_property(property: str)->bool
        Returns True if the property is met, and False otherwise.
    best_text_match(option_list: List[str], prefix: str)->str
        Returns the string that best matches the image.
    simple_query(question: str=None)->str
        Returns the answer to a basic question asked about the image. If no question is provided, returns the answer
        to "What is this?".
    compute_depth()->float
        Returns the median depth of the image crop.
    crop(left: int, lower: int, right: int, upper: int)->ImagePatch
        Returns a new ImagePatch object containing a crop of the image at the given coordinates.
    """

    def __init__(self, image: Union[Image.Image, torch.Tensor, np.ndarray], left: int = None, lower: int = None,
                 right: int = None, upper: int = None, parent_left=0, parent_lower=0, queues=None,
                 parent_img_patch=None):
        """Initializes an ImagePatch object by cropping the image at the given coordinates and stores the coordinates as
        attributes. If no coordinates are provided, the image is left unmodified, and the coordinates are set to the
        dimensions of the image.

        Parameters
        -------
        image : array_like
            An array-like of the original image.
        left : int
            An int describing the position of the left border of the crop's bounding box in the original image.
        lower : int
            An int describing the position of the bottom border of the crop's bounding box in the original image.
        right : int
            An int describing the position of the right border of the crop's bounding box in the original image.
        upper : int
            An int describing the position of the top border of the crop's bounding box in the original image.
        """

        if isinstance(image, Image.Image):
            image = transforms.ToTensor()(image)
        elif isinstance(image, np.ndarray):
            # NOTE(review): permute(1, 2, 0) maps CHW -> HWC, but the indexing below
            # (shape[1] = height, shape[2] = width) assumes CHW — confirm the expected
            # ndarray layout; this looks inconsistent for either HWC or CHW inputs.
            image = torch.tensor(image).permute(1, 2, 0)
        elif isinstance(image, torch.Tensor) and image.dtype == torch.uint8:
            image = image / 255

        if left is None and right is None and upper is None and lower is None:
            self.cropped_image = image
            self.left = 0
            self.lower = 0
            self.right = image.shape[2]  # width
            self.upper = image.shape[1]  # height
        else:
            # Row index is flipped because lower/upper are measured from the image bottom.
            self.cropped_image = image[:, image.shape[1] - upper:image.shape[1] - lower, left:right]
            self.left = left + parent_left
            self.upper = upper + parent_lower
            self.right = right + parent_left
            self.lower = lower + parent_lower

        self.height = self.cropped_image.shape[1]
        self.width = self.cropped_image.shape[2]

        self.cache = {}
        self.queues = (None, None) if queues is None else queues

        self.parent_img_patch = parent_img_patch

        self.horizontal_center = (self.left + self.right) / 2
        self.vertical_center = (self.lower + self.upper) / 2

        if self.cropped_image.shape[1] == 0 or self.cropped_image.shape[2] == 0:
            raise Exception("ImagePatch has no area")

        self.possible_options = load_json('./useful_lists/possible_options.json')

    def forward(self, model_name, *args, **kwargs):
        # Route a model call through the multiprocessing queues of this patch.
        return forward(model_name, *args, queues=self.queues, **kwargs)

    @property
    def original_image(self):
        # Walk up to the root patch to recover the full uncropped image.
        if self.parent_img_patch is None:
            return self.cropped_image
        else:
            return self.parent_img_patch.original_image

    def find(self, object_name: str, confidence_threshold: float = None, return_confidence: bool = False) -> List:
        """Returns a list of ImagePatch objects matching object_name contained in the crop if any are found.
        Otherwise, returns an empty list.
        Parameters
        ----------
        object_name : str
            the name of the object to be found

        Returns
        -------
        List[ImagePatch]
            a list of ImagePatch objects matching object_name contained in the crop
        """
        if confidence_threshold is not None:
            confidence_threshold = float(confidence_threshold)

        # Generic "object(s)" queries go to MaskRCNN; named objects go to GLIP.
        if object_name in ["object", "objects"]:
            all_object_coordinates, all_object_scores = self.forward('maskrcnn', self.cropped_image,
                                                                     confidence_threshold=confidence_threshold)
            all_object_coordinates = all_object_coordinates[0]
            all_object_scores = all_object_scores[0]
        else:
            if object_name == 'person':
                object_name = 'people'  # GLIP does better at people than person

            all_object_coordinates, all_object_scores = self.forward('glip', self.cropped_image, object_name,
                                                                     confidence_threshold=confidence_threshold)
        if len(all_object_coordinates) == 0:
            return []

        # Optionally drop detections that are too small relative to the patch area.
        threshold = config.ratio_box_area_to_image_area
        if threshold > 0:
            area_im = self.width * self.height
            all_areas = torch.tensor([(coord[2] - coord[0]) * (coord[3] - coord[1]) / area_im
                                      for coord in all_object_coordinates])
            mask = all_areas > threshold
            # if not mask.any():
            #     mask = all_areas == all_areas.max()  # At least return one element
            all_object_coordinates = all_object_coordinates[mask]
            all_object_scores = all_object_scores[mask]

        boxes = [self.crop(*coordinates) for coordinates in all_object_coordinates]
        if return_confidence:
            return [(box, float(score)) for box, score in zip(boxes, all_object_scores.reshape(-1))]
        else:
            return boxes

    def exists(self, object_name) -> bool:
        """Returns True if the object specified by object_name is found in the image, and False otherwise.
        Parameters
        -------
        object_name : str
            A string describing the name of the object to be found in the image.
        """
        # Special case: "number four" / "4" means checking a written digit, not detecting an object.
        if object_name.isdigit() or object_name.lower().startswith("number"):
            object_name = object_name.lower().replace("number", "").strip()

            object_name = w2n.word_to_num(object_name)
            answer = self.simple_query("What number is written in the image (in digits)?")
            return w2n.word_to_num(answer) == object_name

        patches = self.find(object_name)

        # Filter false positives from the detector with a VQA double-check.
        filtered_patches = []
        for patch in patches:
            if "yes" in patch.simple_query(f"Is this a {object_name}?"):
                filtered_patches.append(patch)
        return len(filtered_patches) > 0

    def _score(self, category: str, negative_categories=None, model='clip') -> float:
        """
        Returns a binary score for the similarity between the image and the category.
        The negative categories are used to compare to (score is relative to the scores of the negative categories).
        """
        if model == 'clip':
            res = self.forward('clip', self.cropped_image, category, task='score',
                               negative_categories=negative_categories)
        elif model == 'tcl':
            res = self.forward('tcl', self.cropped_image, category, task='score')
        else:  # xvlm
            task = 'binary_score' if negative_categories is not None else 'score'
            res = self.forward('xvlm', self.cropped_image, category, task=task, negative_categories=negative_categories)
            res = res.item()

        return res

    # NOTE(review): `Tuple` is not imported from typing; this only works because of
    # `from __future__ import annotations` (annotations are never evaluated).
    def _detect(self, category: str, thresh, negative_categories=None, model='clip') -> Tuple[bool, float]:
        # Threshold the similarity score into a boolean decision, keeping the raw score.
        score = self._score(category, negative_categories, model)
        return score > thresh, float(score)

    def verify_property(self, object_name: str, attribute: str, return_confidence: bool = False):
        """Returns True if the object possesses the property, and False otherwise.
        Differs from 'exists' in that it presupposes the existence of the object specified by object_name, instead
        checking whether the object possesses the property.
        Parameters
        -------
        object_name : str
            A string describing the name of the object to be found in the image.
        attribute : str
            A string describing the property to be checked.
        """
        name = f"{attribute} {object_name}"
        model = config.verify_property.model
        # Score the attribute against all other known attributes of the same object.
        negative_categories = [f"{att} {object_name}" for att in self.possible_options['attributes']]
        if model == 'clip':
            ret, score = self._detect(name, negative_categories=negative_categories,
                                      thresh=config.verify_property.thresh_clip, model='clip')
        elif model == 'tcl':
            ret, score = self._detect(name, thresh=config.verify_property.thresh_tcl, model='tcl')
        else:  # 'xvlm'
            ret, score = self._detect(name, negative_categories=negative_categories,
                                      thresh=config.verify_property.thresh_xvlm, model='xvlm')

        if return_confidence:
            return ret, score
        else:
            return ret

    def best_text_match(self, option_list: list[str] = None, prefix: str = None) -> str:
        """Returns the string that best matches the image.
        Parameters
        -------
        option_list : str
            A list with the names of the different options
        prefix : str
            A string with the prefixes to append to the options
        """
        option_list_to_use = option_list
        if prefix is not None:
            option_list_to_use = [prefix + " " + option for option in option_list]

        model_name = config.best_match_model
        image = self.cropped_image
        text = option_list_to_use
        if model_name in ('clip', 'tcl'):
            selected = self.forward(model_name, image, text, task='classify')
        elif model_name == 'xvlm':
            res = self.forward(model_name, image, text, task='score')
            res = res.argmax().item()
            selected = res
        else:
            raise NotImplementedError

        # Return the original (un-prefixed) option.
        return option_list[selected]

    def simple_query(self, question: str, return_confidence: bool = False):
        """Returns the answer to a basic question asked about the image. If no question is provided, returns the answer
        to "What is this?". The questions are about basic perception, and are not meant to be used for complex reasoning
        or external knowledge.
        Parameters
        -------
        question : str
            A string describing the question to be asked.
        """
        text, score = self.forward('blip', self.cropped_image, question, task='qa')
        if return_confidence:
            return text, score
        else:
            return text

    def compute_depth(self):
        """Returns the median depth of the image crop
        Parameters
        ----------
        Returns
        -------
        float
            the median depth of the image crop
        """
        # Depth is estimated on the full image, then the crop window is sliced out
        # (rows flipped because lower/upper are bottom-origin).
        original_image = self.original_image
        depth_map = self.forward('depth', original_image)
        depth_map = depth_map[original_image.shape[1] - self.upper:original_image.shape[1] - self.lower,
                    self.left:self.right]
        return depth_map.median()  # Ideally some kind of mode, but median is good enough for now

    def crop(self, left: int, lower: int, right: int, upper: int) -> ImagePatch:
        """Returns a new ImagePatch containing a crop of the original image at the given coordinates.
        Parameters
        ----------
        left : int
            the position of the left border of the crop's bounding box in the original image
        lower : int
            the position of the bottom border of the crop's bounding box in the original image
        right : int
            the position of the right border of the crop's bounding box in the original image
        upper : int
            the position of the top border of the crop's bounding box in the original image

        Returns
        -------
        ImagePatch
            a new ImagePatch containing a crop of the original image at the given coordinates
        """
        # make all inputs ints
        left = int(left)
        lower = int(lower)
        right = int(right)
        upper = int(upper)

        if config.crop_larger_margin:
            # Pad the crop by 10px on each side (clamped) to keep some context.
            left = max(0, left - 10)
            lower = max(0, lower - 10)
            right = min(self.width, right + 10)
            upper = min(self.height, upper + 10)

        return ImagePatch(self.cropped_image, left, lower, right, upper, self.left, self.lower, queues=self.queues,
                          parent_img_patch=self)

    def overlaps_with(self, left, lower, right, upper):
        """Returns True if a crop with the given coordinates overlaps with this one,
        else False.
        Parameters
        ----------
        left : int
            the left border of the crop to be checked
        lower : int
            the lower border of the crop to be checked
        right : int
            the right border of the crop to be checked
        upper : int
            the upper border of the crop to be checked

        Returns
        -------
        bool
            True if a crop with the given coordinates overlaps with this one, else False
        """
        return self.left <= right and self.right >= left and self.lower <= upper and self.upper >= lower

    def llm_query(self, question: str, long_answer: bool = True) -> str:
        # Delegate to the module-level llm_query helper (GPT-3 text query).
        return llm_query(question, None, long_answer)

    def print_image(self, size: tuple[int, int] = None):
        # Display the crop (debugging helper).
        show_single_image(self.cropped_image, size)

    def __repr__(self):
        return "ImagePatch(left={}, right={}, upper={}, lower={}, height={}, width={}, horizontal_center={}, vertical_center={})".format(
            self.left, self.right, self.upper, self.lower, self.height, self.width,
            self.horizontal_center, self.vertical_center,
        )
        # return "ImagePatch({}, {}, {}, {})".format(self.left, self.lower, self.right, self.upper)


def best_image_match(list_patches: list[ImagePatch], content: List[str], return_index: bool = False) -> \
        Union[ImagePatch, None]:
    """Returns the patch most likely to contain the content.
+ Parameters + ---------- + list_patches : List[ImagePatch] + content : List[str] + the object of interest + return_index : bool + if True, returns the index of the patch most likely to contain the object + + Returns + ------- + int + Patch most likely to contain the object + """ + if len(list_patches) == 0: + return None + + model = config.best_match_model + + scores = [] + for cont in content: + if model == 'clip': + res = list_patches[0].forward(model, [p.cropped_image for p in list_patches], cont, task='compare', + return_scores=True) + else: + res = list_patches[0].forward(model, [p.cropped_image for p in list_patches], cont, task='score') + scores.append(res) + scores = torch.stack(scores).mean(dim=0) + scores = scores.argmax().item() # Argmax over all image patches + + if return_index: + return scores + return list_patches[scores] + + +def distance(patch_a: Union[ImagePatch, float], patch_b: Union[ImagePatch, float]) -> float: + """ + Returns the distance between the edges of two ImagePatches, or between two floats. + If the patches overlap, it returns a negative distance corresponding to the negative intersection over union. + """ + + if isinstance(patch_a, ImagePatch) and isinstance(patch_b, ImagePatch): + a_min = np.array([patch_a.left, patch_a.lower]) + a_max = np.array([patch_a.right, patch_a.upper]) + b_min = np.array([patch_b.left, patch_b.lower]) + b_max = np.array([patch_b.right, patch_b.upper]) + + u = np.maximum(0, a_min - b_max) + v = np.maximum(0, b_min - a_max) + + dist = np.sqrt((u ** 2).sum() + (v ** 2).sum()) + + if dist == 0: + box_a = torch.tensor([patch_a.left, patch_a.lower, patch_a.right, patch_a.upper])[None] + box_b = torch.tensor([patch_b.left, patch_b.lower, patch_b.right, patch_b.upper])[None] + dist = - box_iou(box_a, box_b).item() + + else: + dist = abs(patch_a - patch_b) + + return dist + + +def bool_to_yesno(bool_answer: bool) -> str: + """Returns a yes/no answer to a question based on the boolean value of bool_answer. 
+ Parameters + ---------- + bool_answer : bool + a boolean value + + Returns + ------- + str + a yes/no answer to a question based on the boolean value of bool_answer + """ + return "yes" if bool_answer else "no" + + +def llm_query(query, context=None, long_answer=True, queues=None): + """Answers a text question using GPT-3. The input question is always a formatted string with a variable in it. + + Parameters + ---------- + query: str + the text question to ask. Must not contain any reference to 'the image' or 'the photo', etc. + """ + if long_answer: + return forward(model_name='gpt3_general', prompt=query, queues=queues) + else: + return forward(model_name='gpt3_qa', prompt=[query, context], queues=queues) + + +def process_guesses(prompt, guess1=None, guess2=None, queues=None): + return forward(model_name='gpt3_guess', prompt=[prompt, guess1, guess2], queues=queues) + + +def coerce_to_numeric(string, no_string=False): + """ + This function takes a string as input and returns a numeric value after removing any non-numeric characters. + If the input string contains a range (e.g. "10-15"), it returns the first value in the range. + # TODO: Cases like '25to26' return 2526, which is not correct. + """ + if any(month in string.lower() for month in ['january', 'february', 'march', 'april', 'may', 'june', 'july', + 'august', 'september', 'october', 'november', 'december']): + try: + return dateparser.parse(string).timestamp().year + except: # Parse Error + pass + + try: + # If it is a word number (e.g. 
'zero') + numeric = w2n.word_to_num(string) + return numeric + except ValueError: + pass + + # Remove any non-numeric characters except the decimal point and the negative sign + string_re = re.sub("[^0-9\.\-]", "", string) + + if string_re.startswith('-'): + string_re = '&' + string_re[1:] + + # Check if the string includes a range + if "-" in string_re: + # Split the string into parts based on the dash character + parts = string_re.split("-") + return coerce_to_numeric(parts[0].replace('&', '-')) + else: + string_re = string_re.replace('&', '-') + + try: + # Convert the string to a float or int depending on whether it has a decimal point + if "." in string_re: + numeric = float(string_re) + else: + numeric = int(string_re) + except: + if no_string: + raise ValueError + # No numeric values. Return input + return string + return numeric diff --git a/viper/main_batch_exec.py b/viper/main_batch_exec.py new file mode 100644 index 0000000..5ed0614 --- /dev/null +++ b/viper/main_batch_exec.py @@ -0,0 +1,253 @@ +import os +import signal +import traceback +import warnings +from functools import partial + +import pandas as pd +import torch.multiprocessing as mp +from joblib import Memory +from omegaconf import OmegaConf +from rich.console import Console +from torch.utils.data import DataLoader +from tqdm import tqdm + +from configs import config +from main_utils import save_results_csv, my_collate, TimeOutException +from utils import seed_everything + +# See https://github.com/pytorch/pytorch/issues/11201, https://github.com/pytorch/pytorch/issues/973 +# Not for dataloader, but for multiprocessing batches +mp.set_sharing_strategy('file_system') +queue_results = None + +cache = Memory('cache/' if config.use_cache else None, verbose=0) +runs_dict = {} +seed_everything() +console = Console(highlight=False) + + +def handler(signum, frame): + print("Code execution timeout") + raise TimeOutException() + + +def run_program(parameters, queues_in_, input_type_, retrying=False): + 
def run_program(parameters, queues_in_, input_type_, retrying=False):
    """Compile and execute one generated visual program.

    Parameters
    ----------
    parameters : tuple
        (code, sample_id, image, possible_answers, query) for one sample.
    queues_in_ :
        Input queue(s) feeding the vision-process workers.
    input_type_ : str
        Name of the first argument of the generated function (e.g. 'image').
    retrying : bool
        True when this call is the one-shot fallback retry with the fixed code.

    Returns
    -------
    tuple
        (result, code): the program's return value (None on failure) and the
        code that was actually executed.
    """
    from image_patch import ImagePatch, llm_query, best_image_match, distance, bool_to_yesno
    from video_segment import VideoSegment

    global queue_results

    code, sample_id, image, possible_answers, query = parameters
    code = str(code)
    assert 'def ' not in str(query)

    if code.startswith("\ndef"):
        code = code[1:]  # TODO: just a temporary fix

    # Wrap the generated body in a uniquely-named function so concurrent
    # samples don't clobber each other in globals().
    code_header = f'def execute_command_{sample_id}(' \
                  f'{input_type_}, possible_answers, query, ' \
                  f'ImagePatch, VideoSegment, ' \
                  'llm_query, bool_to_yesno, distance, best_image_match):\n' \
                  f'    # Answer is:'
    if not isinstance(code, str):
        print("Warning! code:")
        print(code)
        code = str(code)
    code = code_header + code

    try:
        exec(compile(code, 'Codex', 'exec'), globals())
    except Exception as e:
        print(f'Sample {sample_id} failed at compilation time with error: {e}')
        try:
            with open(config.fixed_code_file, 'r') as f:
                fixed_code = f.read()
            code = code_header + fixed_code
            exec(compile(code, 'Codex', 'exec'), globals())
        except Exception as e2:
            print(f'Not even the fixed code worked. Sample {sample_id} failed at compilation time with error: {e2}')
            return None, code

    queues = [queues_in_, queue_results]

    image_patch_partial = partial(ImagePatch, queues=queues)
    video_segment_partial = partial(VideoSegment, queues=queues)
    llm_query_partial = partial(llm_query, queues=queues)

    try:
        signal.signal(signal.SIGALRM, handler)
        signal.alarm(60 * 20)  # timeout = 20 min, just in case of an infinite loop
        result = globals()[f'execute_command_{sample_id}'](
            # Inputs to the function
            image, possible_answers, query,
            # Classes to be used
            image_patch_partial, video_segment_partial,
            # Functions to be used
            llm_query_partial, bool_to_yesno, distance, best_image_match)
    except Exception as e:
        # print full traceback
        traceback.print_exc()
        if retrying:
            return None, code
        print(f'Sample {sample_id} failed with error: {e}. Next you will see an "expected an indented block" error. ')
        # Retry again with fixed code
        new_code = "["  # This code will break upon execution, and it will be caught by the except clause
        result = run_program((new_code, sample_id, image, possible_answers, query), queues_in_, input_type_,
                             retrying=True)[0]
    finally:
        signal.alarm(0)

    # The function run_{sample_id} is defined globally (exec doesn't work locally). A cleaner alternative would be to
    # save it in a global dict (replace globals() for dict_name in exec), but then it doesn't detect the imported
    # libraries for some reason. Because defining it globally is not ideal, we just delete it after running it.
    if f'execute_command_{sample_id}' in globals():
        del globals()[f'execute_command_{sample_id}']  # If it failed to compile the code, it won't be defined
    return result, code
') + # Retry again with fixed code + new_code = "[" # This code will break upon execution, and it will be caught by the except clause + result = run_program((new_code, sample_id, image, possible_answers, query), queues_in_, input_type_, + retrying=True)[0] + finally: + signal.alarm(0) + + # The function run_{sample_id} is defined globally (exec doesn't work locally). A cleaner alternative would be to + # save it in a global dict (replace globals() for dict_name in exec), but then it doesn't detect the imported + # libraries for some reason. Because defining it globally is not ideal, we just delete it after running it. + if f'execute_command_{sample_id}' in globals(): + del globals()[f'execute_command_{sample_id}'] # If it failed to compile the code, it won't be defined + return result, code + + +def worker_init(queue_results_): + global queue_results + index_queue = mp.current_process()._identity[0] % len(queue_results_) + queue_results = queue_results_[index_queue] + + +def main(): + print(config) + + mp.set_start_method('spawn') + + from vision_processes import queues_in, finish_all_consumers, forward, manager + from datasets import get_dataset + + batch_size = config.dataset.batch_size + num_processes = min(batch_size, 50) + + if config.multiprocessing: + queue_results_main = manager.Queue() + queues_results = [manager.Queue() for _ in range(batch_size)] + else: + queue_results_main = None + queues_results = [None for _ in range(batch_size)] + + model_name_codex = 'codellama' if config.codex.model == 'codellama' else 'codex' + codex = partial(forward, model_name=model_name_codex, queues=[queues_in, queue_results_main]) + + if config.clear_cache: + cache.clear() + + if config.wandb: + import wandb + wandb.init(project="viper", config=OmegaConf.to_container(config)) + # log the prompt file + wandb.save(config.codex.prompt) + + assert config.execute_code + dataset = get_dataset(config.dataset, load_image=True, orig_query=True) + + with open(config.codex.prompt) as 
f: + base_prompt = f.read().strip() + + codes_all = None + if config.use_cached_codex: + results = pd.read_csv(config.cached_codex_path) + # codes_all = [r.split('# Answer is:')[1] for r in results['code']] + codes_all = results['code'].tolist() + # python -c "from joblib import Memory; cache = Memory('cache/', verbose=0); cache.clear()" + dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=False, num_workers=0, pin_memory=True, + collate_fn=my_collate) + input_type = dataset.input_type + + all_results = [] + all_answers = [] + all_codes = [] + all_ids = [] + all_queries = [] + all_img_paths = [] + all_possible_answers = [] + all_query_types = [] + + with mp.Pool(processes=num_processes, initializer=worker_init, initargs=(queues_results,)) \ + if config.multiprocessing else open(os.devnull, "w") as pool: + try: + n_batches = len(dataloader) + + for i, batch in tqdm(enumerate(dataloader), total=n_batches): + + # Combine all queries and get Codex predictions for them + # TODO compute Codex for next batch as current batch is being processed + + if not config.use_cached_codex: + raise NotImplementedError("Please only use this script for executing visual programs") + + else: + codes = codes_all[i * batch_size:(i + 1) * batch_size] # If cache + + # Run the code + if config.execute_code: + if not config.multiprocessing: + # Otherwise, we would create a new model for every process + results = [] + for c, sample_id, img, possible_answers, query in \ + zip(codes, batch['sample_id'], batch['img'], batch['possible_answers'], + batch['question']): + result = run_program([c, sample_id, img, possible_answers, query], queues_in, input_type) + results.append(result) + else: + results = list(pool.imap(partial( + run_program, queues_in_=queues_in, input_type_=input_type), + zip(codes, batch['sample_id'], batch['img'], batch['possible_answers'], batch['question']))) + else: + results = [(None, c) for c in codes] + warnings.warn("Not executing code! 
This is only generating the code. We set the flag " + "'execute_code' to False by default, because executing code generated by a language " + "model can be dangerous. Set the flag 'execute_code' to True if you want to execute " + "it.") + + all_results += [r[0] for r in results] + all_codes += [r[1] for r in results] + all_ids += batch['sample_id'] + all_answers += batch['answer'] + all_possible_answers += batch['possible_answers'] + all_query_types += batch['question_type'] + all_queries += batch['question'] + all_img_paths += ['' for idx in batch['index']] + if i % config.log_every == 0: + try: + accuracy = dataset.accuracy(all_results, all_answers, all_possible_answers, all_query_types, + strict=False) + console.print(f'Accuracy at Batch {i}/{n_batches}: {accuracy}') + except Exception as e: + console.print(f'Error computing accuracy: {e}') + + except Exception as e: + # print full stack trace + traceback.print_exc() + console.print(f'Exception: {e}') + console.print("Completing logging and exiting...") + + try: + accuracy = dataset.accuracy(all_results, all_answers, all_possible_answers, all_query_types, strict=False) + console.print(f'Final accuracy: {accuracy}') + except Exception as e: + print(f'Error computing accuracy: {e}') + + if config.save: + df = pd.DataFrame([all_results, all_answers, all_codes, all_ids, all_queries, all_img_paths, + all_possible_answers]).T + df.columns = ['result', 'answer', 'code', 'id', 'query', 'img_path', 'possible_answers'] + # make the result column a string + df['result'] = df['result'].apply(str) + # df['error'] = df['error'].apply(str) + filename = save_results_csv(config, df) + config['py_file'] = __file__ + OmegaConf.save(config=config, f=filename.replace(".csv", ".yaml")) + print("Dump finished") + + assert not config.wandb + + finish_all_consumers() + + +if __name__ == '__main__': + main() diff --git a/viper/main_batch_generate.py b/viper/main_batch_generate.py new file mode 100644 index 0000000..a4c4d1e --- /dev/null 
+++ b/viper/main_batch_generate.py @@ -0,0 +1,213 @@ +import json +import os +import pickle +import traceback +from functools import partial + +import pandas as pd +import torch.multiprocessing as mp +from joblib import Memory +from omegaconf import OmegaConf +from rich.console import Console +from torch.utils.data import DataLoader +from tqdm import tqdm + +from configs import config +from main_utils import CompileTimeError, ProgramRuntimeError, save_results_csv, my_collate +from utils import seed_everything + +# See https://github.com/pytorch/pytorch/issues/11201, https://github.com/pytorch/pytorch/issues/973 +# Not for dataloader, but for multiprocessing batches +mp.set_sharing_strategy('file_system') +queue_results = None + +cache = Memory('cache/' if config.use_cache else None, verbose=0) +# STAMP = sys.argv[1] +# if os.path.exists(f'cache/{STAMP}'): +# os.removedirs(f'cache/{STAMP}') +# cache = Memory(f'cache/{STAMP}', verbose=0) +runs_dict = {} +seed_everything() +console = Console(highlight=False) + + +def worker_init(queue_results_): + global queue_results + index_queue = mp.current_process()._identity[0] % len(queue_results_) + queue_results = queue_results_[index_queue] + + +def main(): + print(config) + + mp.set_start_method('spawn') + + from vision_processes import queues_in, finish_all_consumers, forward, manager + from datasets import get_dataset + + batch_size = config.dataset.batch_size + num_processes = min(batch_size, 50) + + if config.multiprocessing: + queue_results_main = manager.Queue() + queues_results = [manager.Queue() for _ in range(batch_size)] + else: + queue_results_main = None + queues_results = [None for _ in range(batch_size)] + + if config.codex.model == 'codellama': + if getattr(config.codex, 'multi_turn', False): + model_name_codex = 'multiturn_codellama' + else: + model_name_codex = 'codellama' + else: + if getattr(config.codex, 'multi_turn', False): + model_name_codex = 'multiturn_codex' + else: + model_name_codex = 'codex' + 
codex = partial(forward, model_name=model_name_codex, queues=[queues_in, queue_results_main]) + + if config.clear_cache: + cache.clear() + + if config.wandb: + import wandb + wandb.init(project="viper", config=OmegaConf.to_container(config)) + # log the prompt file + wandb.save(config.codex.prompt) + + dataset = get_dataset(config.dataset, load_image=config.execute_code) + + with open(config.codex.prompt) as f: + base_prompt = f.read().strip() + if getattr(config.codex, 'multi_turn', False): + base_prompt = base_prompt.split("<<<>>>") + base_prompt = [p.strip() for p in base_prompt] + + codes_all = None + if config.use_cached_codex: + if config.cached_codex_path.endswith(".csv"): + results = pd.read_csv(config.cached_codex_path) + else: + assert config.cached_codex_path.endswith(".pkl") + with open(config.cached_codex_path, 'rb') as f: + results = pickle.load(f) + # codes_all = [r.split('# Answer is:')[1] for r in results['code']] + codes_all = results['code'].tolist() + # python -c "from joblib import Memory; cache = Memory('cache/', verbose=0); cache.clear()" + dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=False, num_workers=0, pin_memory=True, + collate_fn=my_collate) + input_type = dataset.input_type + + all_results = [] + all_errors = [] + all_answers = [] + all_codes = [] + all_codes_candidates = [] + all_ids = [] + all_queries = [] + all_img_paths = [] + all_possible_answers = [] + all_query_types = [] + filename = None + + with mp.Pool(processes=num_processes, initializer=worker_init, initargs=(queues_results,)) \ + if config.multiprocessing else open(os.devnull, "w") as pool: + try: + n_batches = len(dataloader) + + for i, batch in tqdm(enumerate(dataloader), total=n_batches): + codes_candidates = [[] for _ in range(len(batch['question']))] + if not config.use_cached_codex: + codes = codex(prompt=batch['question'], base_prompt=base_prompt, input_type=input_type, + extra_context=batch.get( + 'extra_context', ['' for _ in 
range(len(batch['question']))] + )) + if getattr(config.codex, "overgenerate", False): + codes_candidates = [json.dumps(c) for c in codes] + codes = [c[0] for c in codes] + + else: + codes = codes_all[i * batch_size:(i + 1) * batch_size] # If cache + assert len(codes) == len(batch['answer']), "Cached code breaks" + + if i * len(codes) < 4: + print("First %d - %d sample:" % (i * len(codes), (i + 1) * len(codes))) + for c in codes: + print(c) + print() + + # Run the code + assert not config.execute_code, "Please only use this script for generating visual programs" + results = [(None, c) for c in codes] + + all_results += [r[0] for r in results] + all_errors += [r[-1] for r in results] + all_codes += [r[1] for r in results] + all_codes_candidates += codes_candidates + all_ids += batch['sample_id'] + all_answers += batch['answer'] + all_possible_answers += batch['possible_answers'] + all_query_types += batch['question_type'] + all_queries += batch['question'] + # all_img_paths += [dataset.get_sample_path(idx) for idx in batch['index']] + all_img_paths += ['' for idx in batch['index']] + if i % config.log_every == 0: + try: + accuracy = dataset.accuracy(all_results, all_answers, all_possible_answers, all_query_types) + console.print(f'Accuracy at Batch {i}/{n_batches}: {accuracy}') + except Exception as e: + console.print(f'Error computing accuracy: {e}') + + if config.save: + df = pd.DataFrame([all_results, all_answers, all_codes, all_ids, all_queries, all_img_paths, + all_possible_answers, all_codes_candidates, all_errors]).T + df.columns = ['result', 'answer', 'code', 'id', 'query', 'img_path', 'possible_answers', + 'code_candidates', + 'error', ] + # make the result column a string + df['result'] = df['result'].apply(str) + df['error'] = df['error'].apply(str) + filename = save_results_csv(config, df, filename) + config['py_file'] = __file__ + OmegaConf.save(config=config, f=filename.replace(".csv", ".yaml")) + print("Dump finished") + + + except Exception as e: + # 
print full stack trace + traceback.print_exc() + console.print(f'Exception: {e}') + console.print("Completing logging and exiting...") + + n = sum([isinstance(e, CompileTimeError) for e in all_errors]) + print("%.2f%% (%d out of %d) code execution has compile error" % (n / len(all_answers) * 100, n, len(all_answers))) + n = sum([isinstance(e, ProgramRuntimeError) for e in all_errors]) + print("%.2f%% (%d out of %d) code execution has runtime error" % (n / len(all_answers) * 100, n, len(all_answers))) + + try: + accuracy = dataset.accuracy(all_results, all_answers, all_possible_answers, all_query_types) + console.print(f'Final accuracy: {accuracy}') + except Exception as e: + print(f'Error computing accuracy: {e}') + + finish_all_consumers() + + if config.save: + df = pd.DataFrame([all_results, all_answers, all_codes, all_ids, all_queries, all_img_paths, + all_possible_answers, all_codes_candidates, all_errors]).T + df.columns = ['result', 'answer', 'code', 'id', 'query', 'img_path', 'possible_answers', 'code_candidates', + 'error', ] + # make the result column a string + df['result'] = df['result'].apply(str) + df['error'] = df['error'].apply(str) + filename = save_results_csv(config, df, filename) + config['py_file'] = __file__ + OmegaConf.save(config=config, f=filename.replace(".csv", ".yaml")) + print("Dump finished") + + assert not config.wandb + + +if __name__ == '__main__': + main() diff --git a/viper/main_batch_trace.py b/viper/main_batch_trace.py new file mode 100644 index 0000000..4da685a --- /dev/null +++ b/viper/main_batch_trace.py @@ -0,0 +1,339 @@ +import ast +import importlib +import io +import json +import os +import pickle +import re +import signal +import string +import sys +import time +from functools import partial +from typing import List + +import pandas as pd +import pysnooper +import torch.multiprocessing as mp +from joblib import Memory +from omegaconf import OmegaConf +from rich.console import Console +from torch.utils.data import 
def process_trace(text, function_head, execution_function_head):
    """Post-process a pysnooper trace of a generated program.

    Removes everything before the traced function's call line and everything
    after control returns to this file, strips per-line timestamps and huge
    tensor reprs, and rewrites the execution header back into the original
    `function_head` signature.

    Parameters
    ----------
    text : str
        Raw pysnooper output.
    function_head : str
        The user-facing `def execute_command(...)` header.
    execution_function_head : str
        The expanded header actually executed (with helper arguments).

    Returns
    -------
    str
        The cleaned trace; empty string if the header is never found.
    """
    def remove_indent(lines):
        # The indent of the first line defines how much to strip everywhere.
        n_space = 0
        for c in lines[0]:
            if c == ' ':
                n_space += 1
            else:
                break
        # Bug fix: use line[:1] instead of line[0] so empty trace lines
        # (blank lines in the traced source) don't raise IndexError.
        return [line[n_space:] if line[:1] == ' ' else line for line in lines]

    def remove_pre_context(lines: List[str]):
        # Scan from the end for the last occurrence of the execution header
        for i in range(len(lines) - 1, -1, -1):
            line = lines[i]
            if execution_function_head in line:
                # assert "call" in line  # TODO: further double-check?
                content = [line.replace(execution_function_head, function_head)] + lines[i + 1:]
                if line[:1] == ' ':
                    return remove_indent(content)
                return content
        return []

    def remove_post_context(lines):
        # Trace re-enters this driver file (or prints elapsed time) once the
        # traced function returns; cut everything from that point on.
        for i, line in enumerate(lines):
            if line.startswith("Source path:") and line.endswith(__file__):
                return lines[:i]
            elif line.startswith("Elapsed time"):
                return lines[:i]
        return lines

    def remove_timestamp(lines):
        ret = []
        for line in lines:
            if len(line) > 0 and line[0] in string.digits:
                line = line[16:]  # pysnooper prefixes a fixed-width timestamp
            ret.append(line)
        return ret

    def remove_tensor(line):
        # Collapse giant tensor reprs so the trace stays readable/compact
        return re.sub(r"tensor\(\[\[\[.*?\]\]\]\)", "tensor([[[...]]])", line)

    lines = text.splitlines()
    lines = remove_pre_context(lines)
    lines = remove_post_context(lines)
    lines = remove_timestamp(lines)
    lines = [remove_tensor(line) for line in lines]

    return '\n'.join(lines)
def run_program_with_trace(parameters, queues_in_, input_type_, output_type_):
    """Execute one generated program under pysnooper and capture its trace.

    Returns
    -------
    tuple
        (result, error, trace): the program's return value (None on failure),
        a CompileTimeError/ProgramRuntimeError instance or None, and the
        post-processed pysnooper trace (None if compilation failed).
    """
    from image_patch import ImagePatch, llm_query, best_image_match, distance, bool_to_yesno  # noqa
    from video_segment import VideoSegment  # noqa

    function_head = FUNCTION_HEAD.format(input_type=input_type_, output_type=output_type_)
    execution_function_head = EXEC_FUNCTION_HEAD.format(input_type=input_type_, output_type=output_type_)

    global queue_results
    code, sample_id, image, possible_answers, query = parameters

    code = str(code)
    if code.startswith("\ndef"):
        code = code[1:]  # TODO: just a temporary fix

    if code.startswith('def'):
        if code.startswith(function_head):
            code = code.replace(function_head, '')
        else:
            print("--- Code with invalid format\n")
            print(code)
    # NOTE(review): prepending the execution header assumes `code` is now a
    # bare (indented) body — confirm against the generator's output format.
    code = execution_function_head + code
    try:
        code = ast.unparse(ast.parse(code))
    except:
        return None, CompileTimeError(), None

    # Materialize the program as an importable module so pysnooper can trace it
    name = f'x_{STAMP}{sample_id}'
    with open(f'{name}.py', 'w') as f:
        f.write(code)

    x = None  # imported module holding execute_command
    for _ in range(20):
        try:
            x = importlib.import_module(name)
        except ModuleNotFoundError:
            print("Errrr, import error. Wait a bit while.")
            time.sleep(60)  # transient filesystem lag; retry
        except Exception as e:
            print("Import has error:", e)
            break
        else:
            break

    if x is None:
        # Bug fix: previously `x` stayed undefined after a failed import and the
        # later `x.execute_command` raised a NameError that was misreported as a
        # program runtime error. Fail explicitly and clean up the temp file.
        os.remove(f'{name}.py')
        return None, ProgramRuntimeError(), ''

    queues = [queues_in_, queue_results]

    image_patch_partial = partial(ImagePatch, queues=queues)
    video_segment_partial = partial(VideoSegment, queues=queues)
    llm_query_partial = partial(llm_query, queues=queues)

    signal.signal(signal.SIGALRM, handler)
    signal.alarm(60 * 20)  # timeout = 20 min, just in case of an infinite loop
    with io.StringIO() as f:
        with pysnooper.snoop(output=f, color=False, depth=2, max_variable_length=1000):
            result = None
            error = None
            try:
                result = x.execute_command(image, possible_answers, query, image_patch_partial,
                                           video_segment_partial, llm_query_partial, bool_to_yesno,
                                           distance, best_image_match)
            except:
                error = ProgramRuntimeError()
            finally:
                signal.alarm(0)
                os.remove(f'{name}.py')
        f.seek(0)
        traced = f.read(100000)  # cap the trace size
    traced_processed = process_trace(traced, function_head, execution_function_head)

    try:
        _ = pickle.dumps(result)
    except:
        # Results are shipped across processes; fall back to str for
        # unpicklable objects.
        print("Pickle dump fails for {} type object".format(type(result)))
        print("Convert result to str")
        result = str(result)

    return result, error, traced_processed
queue_results_main = None + queues_results = [None for _ in range(batch_size)] + + if config.clear_cache: + cache.clear() + + dataset = get_dataset(config.dataset, load_image=True) + + assert config.use_cached_codex + if config.cached_codex_path.endswith(".csv"): + results = pd.read_csv(config.cached_codex_path) + else: + assert config.cached_codex_path.endswith(".pkl") + with open(config.cached_codex_path, 'rb') as f: + results = pickle.load(f) + # codes_all = [r.split('# Answer is:')[1] for r in results['code']] + codes_all = results['code'].tolist() + # python -c "from joblib import Memory; cache = Memory('cache/', verbose=0); cache.clear()" + dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=False, num_workers=0, pin_memory=True, + collate_fn=my_collate) + input_type = dataset.input_type + output_type = dataset.output_type + + all_codes = results['code'].tolist() + if 'code_candidates' in results: + all_codes_candidates = results['code_candidates'].tolist() + try: + all_codes_candidates = [json.loads(x) for x in all_codes_candidates] + except: + print("`code_candidates` corrupted, shame") + all_codes_candidates = [None for x in all_codes_candidates] + else: + all_codes_candidates = [[] for _ in all_codes] + + all_results = [] + all_errors = [] + all_results_traced = [] + all_answers = [] + all_ids = [] + all_queries = [] + all_img_paths = [] + all_possible_answers = [] + all_query_types = [] + filename = None + + n_batches = len(dataloader) + with mp.Pool(processes=num_processes, initializer=worker_init, initargs=(queues_results,)) \ + if config.multiprocessing else open(os.devnull, "w") as pool: + for i, batch in tqdm(enumerate(dataloader), total=n_batches): + assert config.use_cached_codex + codes = codes_all[i * batch_size:(i + 1) * batch_size] # If cache + assert len(codes) == len(batch['answer']), "Cached code breaks" + + if i * len(codes) < 4: + print("First %d - %d sample:" % (i * len(codes), (i + 1) * len(codes))) + for c in codes: + 
print(c) + print() + + # Run the code + assert config.execute_code + if not config.multiprocessing: + # Otherwise, we would create a new model for every process + results = [] + for c, sample_id, img, possible_answers, query in zip( + codes, batch['sample_id'], batch['img'], batch['possible_answers'], batch['question']): + result = run_program_with_trace([c, sample_id, img, possible_answers, query], queues_in, + input_type, output_type) + results.append(result) + else: + # Note: not recommended to use multiprocessing + results = list(pool.imap(partial( + run_program_with_trace, queues_in_=queues_in, input_type_=input_type, output_type_=output_type), + zip(codes, batch['sample_id'], batch['img'], batch['possible_answers'], batch['question']))) + + if i * len(codes) < 4: + print("First %d - %d sample, traced:" % (i * len(codes), (i + 1) * len(codes))) + for _, error, traced in results: + print(str(traced)[:4000]) + print('Error =', error) + print() + + all_results += [r[0] for r in results] + all_errors += [r[1] for r in results] + all_results_traced += [r[2] for r in results] + all_ids += batch['sample_id'] + all_answers += batch['answer'] + all_possible_answers += batch['possible_answers'] + all_query_types += batch['question_type'] + all_queries += batch['question'] + all_img_paths += ['' for idx in batch['index']] + if i % config.log_every == 0: + try: + accuracy = dataset.accuracy(all_results, all_answers, all_possible_answers, all_query_types) + console.print(f'Accuracy at Batch {i}/{n_batches}: {accuracy}') + except Exception as e: + console.print(f'Error computing accuracy: {e}') + + if config.save: + df = pd.DataFrame( + [all_results, all_answers, all_results_traced, all_errors, all_codes, all_ids, all_queries, + all_img_paths, all_possible_answers, all_codes_candidates]).T + df.columns = ['result', 'answer', 'traced', 'error', 'code', 'id', 'query', 'img_path', + 'possible_answers', + 'code_candidates', ] + # make the result column a string + df['result'] = 
df['result'].apply(str) + df['error'] = df['error'].apply(str) + filename = save_results_csv(config, df, filename) + config['py_file'] = __file__ + OmegaConf.save(config=config, f=filename.replace(".csv", ".yaml")) + print("Dump finished") + + n_compile_error = sum([isinstance(e, CompileTimeError) for e in all_errors]) + print("%.2f%% (%d out of %d) code execution has compile error" % ( + n_compile_error / len(all_answers) * 100, n_compile_error, len(all_answers), + )) + + try: + accuracy = dataset.accuracy(all_results, all_answers, all_possible_answers, all_query_types) + console.print(f'Final accuracy: {accuracy}') + except Exception as e: + print(f'Error computing accuracy: {e}') + + finish_all_consumers() + + if config.save: + df = pd.DataFrame([all_results, all_answers, all_results_traced, all_errors, all_codes, all_ids, all_queries, + all_img_paths, all_possible_answers, all_codes_candidates]).T + df.columns = ['result', 'answer', 'traced', 'error', 'code', 'id', 'query', 'img_path', 'possible_answers', + 'code_candidates', ] + # make the result column a string + df['result'] = df['result'].apply(str) + df['error'] = df['error'].apply(str) + filename = save_results_csv(config, df, filename) + config['py_file'] = __file__ + OmegaConf.save(config=config, f=filename.replace(".csv", ".yaml")) + print("Dump finished") + + +if __name__ == '__main__': + main() diff --git a/viper/main_utils.py b/viper/main_utils.py new file mode 100644 index 0000000..5ad223a --- /dev/null +++ b/viper/main_utils.py @@ -0,0 +1,42 @@ +import pathlib + + +def my_collate(batch): + # Avoid stacking images (different size). 
def save_results_csv(config, df, filename=None):
    """Save a results dataframe under <results_dir>/<split>/.

    Parameters
    ----------
    config :
        Run configuration; uses config['results_dir'], config.dataset.split
        and config.save_new_results.
    df : pandas.DataFrame
        Results to write.
    filename : str, optional
        Path chosen on a previous call; when given it is reused so repeated
        dumps of the same run overwrite one file.

    Returns
    -------
    str
        The path the dataframe was written to.
    """
    results_dir = pathlib.Path(config['results_dir'])
    results_dir = results_dir / config.dataset.split
    results_dir.mkdir(parents=True, exist_ok=True)

    if filename is None:
        if not config.save_new_results:
            filename = 'results.csv'
        else:
            # Pick the next free results_<n>.csv index, ignoring files whose
            # suffix is not a number (e.g. results_final.csv).
            indices = [int(ef.stem.split('_')[-1]) for ef in results_dir.glob('results_*.csv')
                       if str.isnumeric(ef.stem.split('_')[-1])]
            # Bug fix: max() over an empty list raised ValueError when only
            # non-numeric results_*.csv files existed; default=-1 yields 0.
            filename = 'results_' + str(max(indices, default=-1) + 1) + '.csv'
        filename = results_dir / filename

    print('Saving results to', filename)
    df.to_csv(filename, header=True, index=False, encoding='utf-8')

    return str(filename)
+ right : int + An int describing the position of the right border of the crop's bounding box in the original image. + upper : int + An int describing the position of the top border of the crop's bounding box in the original image. + + Methods + ------- + find(object_name: str)->List[ImagePatch] + Returns a list of new ImagePatch objects containing crops of the image centered around any objects found in the image matching the object_name. + simple_query(question: str=None)->str + Returns the answer to a basic question asked about the image. If no question is provided, returns the answer to "What is this?". + exists(object_name: str)->bool + Returns True if the object specified by object_name is found in the image, and False otherwise. + verify_property(property: str)->bool + Returns True if the property is met, and False otherwise. + best_text_match(string1: str, string2: str)->str + Returns the string that best matches the image. + crop(left: int, lower: int, right: int, upper: int)->ImagePatch + Returns a new ImagePatch object containing a crop of the image at the given coordinates. + """ + + def __init__(self, image, left: int=None, lower: int=None, right: int=None, upper: int=None): + """Initializes an ImagePatch object by cropping the image at the given coordinates and stores the coordinates as attributes. + If no coordinates are provided, the image is left unmodified, and the coordinates are set to the dimensions of the image. + Parameters + ------- + image : array_like + An array-like of the original image. + left : int + An int describing the position of the left border of the crop's bounding box in the original image. + lower : int + An int describing the position of the bottom border of the crop's bounding box in the original image. + right : int + An int describing the position of the right border of the crop's bounding box in the original image. 
+ upper : int + An int describing the position of the top border of the crop's bounding box in the original image. + + """ + if left is None and right is None and upper is None and lower is None: + self.cropped_image = image + self.left = 0 + self.lower = 0 + self.right = image.shape[2] # width + self.upper = image.shape[1] # height + else: + self.cropped_image = image[:, lower:upper, left:right] + self.left = left + self.upper = upper + self.right = right + self.lower = lower + + self.width = self.cropped_image.shape[2] + self.height = self.cropped_image.shape[1] + + self.horizontal_center = (self.left + self.right) / 2 + self.vertical_center = (self.lower + self.upper) / 2 + + def find(self, object_name: str)->List["ImagePatch"]: + """Returns a new ImagePatch object containing the crop of the image centered around the object specified by object_name. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + """ + return find_in_image(self.cropped_image, object_name) + + def simple_query(self, question: str=None)->str: + """Returns the answer to a basic question asked about the image. If no question is provided, returns the answer to "What is this?". + Parameters + ------- + question : str + A string describing the question to be asked. + + Examples + ------- + + >>> # Which kind of animal is not eating? + >>> def execute_command(image)->str: + >>> image_patch = ImagePatch(image) + >>> animal_patches = image_patch.find("animal") + >>> for animal_patch in animal_patches: + >>> if not animal_patch.verify_property("animal", "eating"): + >>> return animal_patch.simple_query("What kind of animal is eating?") # crop would include eating so keep it in the query + >>> # If no animal is not eating, query the image directly + >>> return image_patch.simple_query("Which kind of animal is not eating?") + + >>> # What is in front of the horse? 
+ >>> # contains a relation (around, next to, on, near, on top of, in front of, behind, etc), so ask directly + >>> return image_patch.simple_query("What is in front of the horse?") + >>> + """ + return simple_qa(self.cropped_image, question) + + def exists(self, object_name: str)->bool: + """Returns True if the object specified by object_name is found in the image, and False otherwise. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + + Examples + ------- + >>> # Are there both cakes and gummy bears in the photo? + >>> def execute_command(image)->str: + >>> image_patch = ImagePatch(image) + >>> is_cake = image_patch.exists("cake") + >>> is_gummy_bear = image_patch.exists("gummy bear") + >>> return bool_to_yesno(is_cake and is_gummy_bear) + """ + return len(self.find(object_name)) > 0 + + def verify_property(self, object_name: str, property: str)->bool: + """Returns True if the object possesses the property, and False otherwise. + Differs from 'exists' in that it presupposes the existence of the object specified by object_name, instead checking whether the object possesses the property. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + property : str + A string describing the property to be checked. + + Examples + ------- + >>> # Do the letters have blue color? 
+ >>> def execute_command(image)->str: + >>> image_patch = ImagePatch(image) + >>> letters_patches = image_patch.find("letters") + >>> # Question assumes only one letter patch + >>> if len(letters_patches) == 0: + >>> # If no letters are found, query the image directly + >>> return image_patch.simple_query("Do the letters have blue color?") + >>> return bool_to_yesno(letters_patches[0].verify_property("letters", "blue")) + """ + return verify_property(self.cropped_image, object_name, property) + + def best_text_match(self, option_list: List[str]) -> str: + """Returns the string that best matches the image. + Parameters + ------- + option_list : str + A list with the names of the different options + prefix : str + A string with the prefixes to append to the options + + Examples + ------- + >>> # Is the cap gold or white? + >>> def execute_command(image)->str: + >>> image_patch = ImagePatch(image) + >>> cap_patches = image_patch.find("cap") + >>> # Question assumes one cap patch + >>> if len(cap_patches) == 0: + >>> # If no cap is found, query the image directly + >>> return image_patch.simple_query("Is the cap gold or white?") + >>> return cap_patches[0].best_text_match(["gold", "white"]) + """ + return best_text_match(self.cropped_image, option_list) + + def crop(self, left: int, lower: int, right: int, upper: int)->"ImagePatch": + """Returns a new ImagePatch cropped from the current ImagePatch. + Parameters + ------- + left : int + The leftmost pixel of the cropped image. + lower : int + The lowest pixel of the cropped image. + right : int + The rightmost pixel of the cropped image. + upper : int + The uppermost pixel of the cropped image. + ------- + """ + return ImagePatch(self.cropped_image, left, lower, right, upper) + +# Examples of using ImagePatch +# Is there a backpack to the right of the man? 
+def execute_command(image)->str: + image_patch = ImagePatch(image) + man_patches = image_patch.find("man") + # Question assumes one man patch + if len(man_patches) == 0: + # If no man is found, query the image directly + return image_patch.simple_query("Is there a backpack to the right of the man?") + man_patch = man_patches[0] + backpack_patches = image_patch.find("backpack") + # Question assumes one backpack patch + if len(backpack_patches) == 0: + return "no" + for backpack_patch in backpack_patches: + if backpack_patch.horizontal_center > man_patch.horizontal_center: + return "yes" + return "no" + +# In which part is the bread, the bottom or the top? +def execute_command(image)->str: + image_patch = ImagePatch(image) + bread_patches = image_patch.find("bread") + # Question assumes only one bread patch + if len(bread_patches) == 0: + # If no bread is found, query the image directly + return image_patch.simple_query("In which part is the bread, the bottom or the top?") + if bread_patches[0].vertical_center < image_patch.vertical_center: + return "bottom" + else: + return "top" + +# What type of weather do you see in the photograph? +def execute_command(image)->str: + image_patch = ImagePatch(image) + return image_patch.simple_query("What type of weather do you see in the photograph?") + +# Who is the man staring at? +def execute_command(image)->str: + # asks for the predicate of a relational verb (staring at), so ask directly + image_patch = ImagePatch(image) + return image_patch.simple_query("Who is the man staring at?") + +# What toy is wearing a shirt? 
+def execute_command(image)->str: + # not a relational verb so go step by step + image_patch = ImagePatch(image) + toy_patches = image_patch.find("toy") + # Question assumes only one toy patch + if len(toy_patches) == 0: + # If no toy is found, query the image directly + return image_patch.simple_query("What toy is wearing a shirt?") + for toy_patch in toy_patches: + is_wearing_shirt = (toy_patch.simple_query("Is the toy wearing a shirt?") == "yes") + if is_wearing_shirt: + return toy_patch.simple_query("What toy is wearing a shirt?") # crop would include the shirt so keep it in the query + # If no toy is wearing a shirt, pick the first toy + return toy_patches[0].simple_query("What toy is wearing a shirt?") + +# What is behind the pole? +def execute_command(image)->str: + image_patch = ImagePatch(image) + # contains a relation (around, next to, on, near, on top of, in front of, behind, etc), so ask directly + return image_patch.simple_query("What is behind the pole?") + +# Are there bagels or lemons? +def execute_command(image)->str: + image_patch = ImagePatch(image) + is_bagel = image_patch.exists("bagel") + is_lemon = image_patch.exists("lemon") + return bool_to_yesno(is_bagel or is_lemon) + +# Is that blanket to the right of a pillow? 
+def execute_command(image)->str: + image_patch = ImagePatch(image) + blanket_patches = image_patch.find("blanket") + # Question assumes only one blanket patch + if len(blanket_patches) == 0: + # If no blanket is found, query the image directly + return image_patch.simple_query("Is that blanket to the right of a pillow?") + for blanket_patch in blanket_patches: + pillow_patches = image_patch.find("pillow") + for pillow_patch in pillow_patches: + if pillow_patch.horizontal_center > blanket_patch.horizontal_center: + return "yes" + return "no" + +# INSERT_QUERY_HERE +def execute_command(image)->str: \ No newline at end of file diff --git a/viper/prompts/benchmarks/joint.py b/viper/prompts/benchmarks/joint.py new file mode 100644 index 0000000..3666711 --- /dev/null +++ b/viper/prompts/benchmarks/joint.py @@ -0,0 +1,768 @@ +from typing import List, Union + +from vision_functions import find_in_image, simple_qa, verify_property, best_text_match, compute_depth + + +def bool_to_yesno(bool_answer: bool) -> str: + return "yes" if bool_answer else "no" + + +class ImagePatch: + """A Python class containing a crop of an image centered around a particular object, as well as relevant information. + Attributes + ---------- + cropped_image : array_like + An array-like of the cropped image taken from the original image. + left : int + An int describing the position of the left border of the crop's bounding box in the original image. + lower : int + An int describing the position of the bottom border of the crop's bounding box in the original image. + right : int + An int describing the position of the right border of the crop's bounding box in the original image. + upper : int + An int describing the position of the top border of the crop's bounding box in the original image. 
+
+    Methods
+    -------
+    find(object_name: str) -> List[ImagePatch]
+        Returns a list of new ImagePatch objects containing crops of the image centered around any objects found in the image matching the object_name.
+    simple_query(question: str=None) -> str
+        Returns the answer to a basic question asked about the image. If no question is provided, returns the answer to "What is this?".
+    exists(object_name: str) -> bool
+        Returns True if the object specified by object_name is found in the image, and False otherwise.
+    verify_property(object_name: str, property: str) -> bool
+        Returns True if the object possesses the property, and False otherwise.
+    compute_depth()->float
+        Returns the median depth of the image crop.
+    best_text_match(option_list: List[str]) -> str
+        Returns the string that best matches the image.
+    crop(left: int, lower: int, right: int, upper: int) -> ImagePatch
+        Returns a new ImagePatch object containing a crop of the image at the given coordinates.
+    """
+
+    def __init__(self, image, left: int = None, lower: int = None, right: int = None, upper: int = None):
+        """Initializes an ImagePatch object by cropping the image at the given coordinates and stores the coordinates as attributes.
+        If no coordinates are provided, the image is left unmodified, and the coordinates are set to the dimensions of the image.
+        Parameters
+        -------
+        image : array_like
+            An array-like of the original image.
+        left : int
+            An int describing the position of the left border of the crop's bounding box in the original image.
+        lower : int
+            An int describing the position of the bottom border of the crop's bounding box in the original image.
+        right : int
+            An int describing the position of the right border of the crop's bounding box in the original image.
+        upper : int
+            An int describing the position of the top border of the crop's bounding box in the original image.
+ """ + if left is None and right is None and upper is None and lower is None: + self.cropped_image = image + self.left = 0 + self.lower = 0 + self.right = image.shape[2] # width + self.upper = image.shape[1] # height + else: + self.cropped_image = image[:, lower:upper, left:right] + self.left = left + self.upper = upper + self.right = right + self.lower = lower + + self.width = self.cropped_image.shape[2] + self.height = self.cropped_image.shape[1] + + self.horizontal_center = (self.left + self.right) / 2 + self.vertical_center = (self.lower + self.upper) / 2 + + def find(self, object_name: str) -> List["ImagePatch"]: + """Returns a new ImagePatch object containing the crop of the image centered around the object specified by object_name. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + + Examples + -------- + >>> # Given an image: Find the foo. + >>> def execute_command(image) -> List[ImagePatch]: + >>> image_patch = ImagePatch(image) + >>> foo_patches = image_patch.find("foo") + >>> return foo_patches + """ + return find_in_image(self.cropped_image, object_name) + + def simple_query(self, question: str = None) -> str: + """Returns the answer to a basic question asked about the image. If no question is provided, returns the answer to "What is this?". + Parameters + ------- + question : str + A string describing the question to be asked. + + Examples + ------- + >>> # Given an image: Which kind of animal is not eating? 
+ >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> animal_patches = image_patch.find("animal") + >>> for animal_patch in animal_patches: + >>> if not animal_patch.verify_property("animal", "eating"): + >>> return animal_patch.simple_query("What kind of animal is eating?") # crop would include eating so keep it in the query + >>> # If no animal is not eating, query the image directly + >>> return image_patch.simple_query("Which kind of animal is not eating?") + + >>> # Given an image: What is in front of the horse? + >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> # contains a relation (around, next to, on, near, on top of, in front of, behind, etc), so ask directly + >>> return image_patch.simple_query("What is in front of the horse?") + """ + return simple_qa(self.cropped_image, question) + + def exists(self, object_name: str) -> bool: + """Returns True if the object specified by object_name is found in the image, and False otherwise. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + + Examples + ------- + >>> # Given an image: Are there both cakes and gummy bears in the photo? + >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> is_cake = image_patch.exists("cake") + >>> is_gummy_bear = image_patch.exists("gummy bear") + >>> return bool_to_yesno(is_cake and is_gummy_bear) + """ + return len(self.find(object_name)) > 0 + + def verify_property(self, object_name: str, property: str) -> bool: + """Returns True if the object possesses the property, and False otherwise. + Differs from 'exists' in that it presupposes the existence of the object specified by object_name, instead checking whether the object possesses the property. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + property : str + A string describing the property to be checked. 
+ + Examples + ------- + >>> # Given an image: Do the letters have blue color? + >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> letters_patches = image_patch.find("letters") + >>> # Question assumes only one letter patch + >>> if len(letters_patches) == 0: + >>> # If no letters are found, query the image directly + >>> return image_patch.simple_query("Do the letters have blue color?") + >>> return bool_to_yesno(letters_patches[0].verify_property("letters", "blue")) + """ + return verify_property(self.cropped_image, object_name, property) + + def compute_depth(self): + """Returns the median depth of the image crop + Parameters + ---------- + Returns + ------- + float + the median depth of the image crop + + Examples + -------- + >>> # Given an image: Find the bar furthest away. + >>> def execute_command(image)->ImagePatch: + >>> image_patch = ImagePatch(image) + >>> bar_patches = image_patch.find("bar") + >>> bar_patches.sort(key=lambda bar: bar.compute_depth()) + >>> return bar_patches[-1] + """ + depth_map = compute_depth(self.cropped_image) + return depth_map.median() + + def best_text_match(self, option_list: List[str]) -> str: + """Returns the string that best matches the image. + Parameters + ------- + option_list : str + A list with the names of the different options + prefix : str + A string with the prefixes to append to the options + + Examples + ------- + >>> # Given an image: Is the cap gold or white? 
+ >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> cap_patches = image_patch.find("cap") + >>> # Question assumes one cap patch + >>> if len(cap_patches) == 0: + >>> # If no cap is found, query the image directly + >>> return image_patch.simple_query("Is the cap gold or white?") + >>> return cap_patches[0].best_text_match(["gold", "white"]) + """ + return best_text_match(self.cropped_image, option_list) + + def crop(self, left: int, lower: int, right: int, upper: int) -> "ImagePatch": + """Returns a new ImagePatch cropped from the current ImagePatch. + Parameters + ------- + left : int + The leftmost pixel of the cropped image. + lower : int + The lowest pixel of the cropped image. + right : int + The rightmost pixel of the cropped image. + upper : int + The uppermost pixel of the cropped image. + ------- + """ + return ImagePatch(self.cropped_image, left, lower, right, upper) + + +def best_image_match(list_patches: List[ImagePatch], content: List[str], return_index=False) -> Union[ImagePatch, int]: + """Returns the patch most likely to contain the content. + Parameters + ---------- + list_patches : List[ImagePatch] + content : List[str] + the object of interest + return_index : bool + if True, returns the index of the patch most likely to contain the object + + Returns + ------- + int + Patch most likely to contain the object + """ + return best_image_match(list_patches, content, return_index) + + +def distance(patch_a: ImagePatch, patch_b: ImagePatch) -> float: + """ + Returns the distance between the edges of two ImagePatches. If the patches overlap, it returns a negative distance + corresponding to the negative intersection over union. 
+ + Parameters + ---------- + patch_a : ImagePatch + patch_b : ImagePatch + + Examples + -------- + # Return the qux that is closest to the foo + >>> def execute_command(image): + >>> image_patch = ImagePatch(image) + >>> qux_patches = image_patch.find('qux') + >>> foo_patches = image_patch.find('foo') + >>> foo_patch = foo_patches[0] + >>> qux_patches.sort(key=lambda x: distance(x, foo_patch)) + >>> return qux_patches[0] + """ + return distance(patch_a, patch_b) + + +# Examples of using ImagePatch + + +# Given an image: What toy is wearing a shirt? +def execute_command(image) -> str: + # not a relational verb so go step by step + image_patch = ImagePatch(image) + toy_patches = image_patch.find("toy") + # Question assumes only one toy patch + if len(toy_patches) == 0: + # If no toy is found, query the image directly + return image_patch.simple_query("What toy is wearing a shirt?") + for toy_patch in toy_patches: + is_wearing_shirt = (toy_patch.simple_query("Is the toy wearing a shirt?") == "yes") + if is_wearing_shirt: + return toy_patch.simple_query( + "What toy is wearing a shirt?") # crop would include the shirt so keep it in the query + # If no toy is wearing a shirt, pick the first toy + return toy_patches[0].simple_query("What toy is wearing a shirt?") + + +# Given an image: Who is the man staring at? +def execute_command(image) -> str: + # asks for the predicate of a relational verb (staring at), so ask directly + image_patch = ImagePatch(image) + return image_patch.simple_query("Who is the man staring at?") + + +# Given an image: Find more visible chair. +def execute_command(image) -> ImagePatch: + # Return the chair + image_patch = ImagePatch(image) + # Remember: return the chair + return image_patch.find("chair")[0] + + +# Given an image: Find lamp on the bottom. 
+def execute_command(image) -> ImagePatch: + # Return the lamp + image_patch = ImagePatch(image) + lamp_patches = image_patch.find("lamp") + lamp_patches.sort(key=lambda lamp: lamp.vertical_center) + # Remember: return the lamp + return lamp_patches[0] # Return the bottommost lamp + + +# Given a list of images: Does the pole that is near a building that is near a green sign and the pole that is near bushes that are near a green sign have the same material? +def execute_command(image_list) -> str: + material_1 = None + material_2 = None + for image in image_list: + image = ImagePatch(image) + # find the building + building_patches = image.find("building") + for building_patch in building_patches: + poles = building_patch.find("pole") + signs = building_patch.find("sign") + greensigns = [sign for sign in signs if sign.verify_property('sign', 'green')] + if len(poles) > 0 and len(greensigns) > 0: + material_1 = poles[0].simple_query("What is the material of the pole?") + # find the bush + bushes_patches = image.find("bushes") + for bushes_patch in bushes_patches: + poles = bushes_patch.find("pole") + signs = bushes_patch.find("sign") + greensigns = [sign for sign in signs if sign.verify_property('sign', 'green')] + if len(poles) > 0 and len(greensigns) > 0: + material_2 = poles[0].simple_query("What is the material of the pole?") + return bool_to_yesno(material_1 == material_2) + + +# Given an image: Find middle kid. +def execute_command(image) -> ImagePatch: + # Return the kid + image_patch = ImagePatch(image) + kid_patches = image_patch.find("kid") + if len(kid_patches) == 0: + kid_patches = [image_patch] + kid_patches.sort(key=lambda kid: kid.horizontal_center) + # Remember: return the kid + return kid_patches[len(kid_patches) // 2] # Return the middle kid + + +# Given an image: Is that blanket to the right of a pillow? 
+def execute_command(image) -> str: + image_patch = ImagePatch(image) + blanket_patches = image_patch.find("blanket") + # Question assumes only one blanket patch + if len(blanket_patches) == 0: + # If no blanket is found, query the image directly + return image_patch.simple_query("Is that blanket to the right of a pillow?") + for blanket_patch in blanket_patches: + pillow_patches = image_patch.find("pillow") + for pillow_patch in pillow_patches: + if pillow_patch.horizontal_center > blanket_patch.horizontal_center: + return "yes" + return "no" + + +# Given an image: How many people are there? +def execute_command(image) -> str: + image_patch = ImagePatch(image) + person_patches = image_patch.find("person") + return str(len(person_patches)) + + +# Given a list of images: Is the man that is wearing dark pants driving?. +def execute_command(image_list) -> str: + for image in image_list: + image = ImagePatch(image) + man_patches = image.find("man") + for man_patch in man_patches: + pants = man_patch.find("pants") + if len(pants) == 0: + continue + if pants[0].verify_property("pants", "dark"): + return man_patch.simple_query("Is this man driving?") + return ImagePatch(image_list[0]).simple_query("Is the man that is wearing dark pants driving?") + + +# Given an image: Is there a backpack to the right of the man? 
+def execute_command(image) -> str: + image_patch = ImagePatch(image) + man_patches = image_patch.find("man") + # Question assumes one man patch + if len(man_patches) == 0: + # If no man is found, query the image directly + return image_patch.simple_query("Is there a backpack to the right of the man?") + man_patch = man_patches[0] + backpack_patches = image_patch.find("backpack") + # Question assumes one backpack patch + if len(backpack_patches) == 0: + return "no" + for backpack_patch in backpack_patches: + if backpack_patch.horizontal_center > man_patch.horizontal_center: + return "yes" + return "no" + + +# Given a list of images: What is the pizza with red tomato on it on? +def execute_command(image_list) -> str: + for image in image_list: + image = ImagePatch(image) + pizza_patches = image.find("pizza") + for pizza_patch in pizza_patches: + tomato_patches = pizza_patch.find("tomato") + has_red_tomato = False + for tomato_patch in tomato_patches: + if tomato_patch.verify_property("tomato", "red"): + has_red_tomato = True + if has_red_tomato: + return pizza_patch.simple_query("What is the pizza on?") + return ImagePatch(image_list[0]).simple_query("What is the pizza with red tomato on it on?") + + +# Given an image: Find chair to the right near the couch. +def execute_command(image) -> ImagePatch: + # Return the chair + image_patch = ImagePatch(image) + chair_patches = image_patch.find("chair") + if len(chair_patches) == 0: + chair_patches = [image_patch] + elif len(chair_patches) == 1: + return chair_patches[0] + chair_patches_right = [c for c in chair_patches if c.horizontal_center > image_patch.horizontal_center] + couch_patches = image_patch.find("couch") + if len(couch_patches) == 0: + couch_patches = [image_patch] + couch_patch = couch_patches[0] + chair_patches_right.sort(key=lambda c: distance(c, couch_patch)) + chair_patch = chair_patches_right[0] + # Remember: return the chair + return chair_patch + + +# Given an image: Are there bagels or lemons? 
+def execute_command(image) -> str: + image_patch = ImagePatch(image) + is_bagel = image_patch.exists("bagel") + is_lemon = image_patch.exists("lemon") + return bool_to_yesno(is_bagel or is_lemon) + + +# Given an image: In which part is the bread, the bottom or the top? +def execute_command(image) -> str: + image_patch = ImagePatch(image) + bread_patches = image_patch.find("bread") + # Question assumes only one bread patch + if len(bread_patches) == 0: + # If no bread is found, query the image directly + return image_patch.simple_query("In which part is the bread, the bottom or the top?") + if bread_patches[0].vertical_center < image_patch.vertical_center: + return "bottom" + else: + return "top" + + +# Given an image: Find foo to bottom left. +def execute_command(image) -> ImagePatch: + # Return the foo + image_patch = ImagePatch(image) + foo_patches = image_patch.find("foo") + lowermost_coordinate = min([patch.vertical_center for patch in foo_patches]) + foo_patches_bottom = [patch for patch in foo_patches if patch.vertical_center - lowermost_coordinate < 100] + if len(foo_patches_bottom) == 0: + foo_patches_bottom = foo_patches + elif len(foo_patches_bottom) == 1: + return foo_patches_bottom[0] + foo_patches_bottom.sort(key=lambda foo: foo.horizontal_center) + foo_patch = foo_patches_bottom[0] + # Remember: return the foo + return foo_patch + + +# Given an image: Find number 17. +def execute_command(image) -> ImagePatch: + # Return the person + image_patch = ImagePatch(image) + person_patches = image_patch.find("person") + for patch in person_patches: + if patch.exists("17"): + return patch + # Remember: return the person + return person_patches[0] + + +# Given a list of images: Is the statement true? There is at least 1 image with a brown dog that is near a bicycle and is wearing a collar. 
+def execute_command(image_list) -> str: + for image in image_list: + image = ImagePatch(image) + dog_patches = image.find("dog") + for dog in dog_patches: + near_bicycle = dog.simple_query("Is the dog near a bicycle?") + wearing_collar = dog.simple_query("Is the dog wearing a collar?") + if near_bicycle == "yes" and wearing_collar == "yes": + return 'yes' + return 'no' + + +# Given an image: Find dog to the left of the post who is closest to girl wearing a shirt with text that says "I love you". +def execute_command(image) -> ImagePatch: + # Return the dog + image_patch = ImagePatch(image) + shirt_patches = image_patch.find("shirt") + if len(shirt_patches) == 0: + shirt_patches = [image_patch] + shirt_patch = best_image_match(list_patches=shirt_patches, content=["I love you shirt"]) + post_patches = image_patch.find("post") + post_patches.sort(key=lambda post: distance(post, shirt_patch)) + post_patch = post_patches[0] + dog_patches = image_patch.find("dog") + dogs_left_patch = [dog for dog in dog_patches if dog.left < post_patch.left] + if len(dogs_left_patch) == 0: + dogs_left_patch = dog_patches + dogs_left_patch.sort(key=lambda dog: distance(dog, post_patch)) + dog_patch = dogs_left_patch[0] + # Remember: return the dog + return dog_patch + + +# Given an image: Find balloon on the right and second from the bottom. 
+def execute_command(image) -> ImagePatch: + # Return the balloon + image_patch = ImagePatch(image) + balloon_patches = image_patch.find("balloon") + if len(balloon_patches) == 0: + balloon_patches = [image_patch] + elif len(balloon_patches) == 1: + return balloon_patches[0] + leftmost_coordinate = min([patch.horizontal_center for patch in balloon_patches]) + balloon_patches_right = [patch for patch in balloon_patches if patch.horizontal_center - leftmost_coordinate < 100] + if len(balloon_patches_right) == 0: + balloon_patches_right = balloon_patches + balloon_patches_right.sort(key=lambda p: p.vertical_center) + balloon_patch = balloon_patches_right[1] + # Remember: return the balloon + return balloon_patch + + +# Given an image: Find girl in white next to man in left. +def execute_command(image) -> ImagePatch: + # Return the girl + image_patch = ImagePatch(image) + girl_patches = image_patch.find("girl") + girl_in_white_patches = [g for g in girl_patches if g.verify_property("girl", "white clothing")] + if len(girl_in_white_patches) == 0: + girl_in_white_patches = girl_patches + man_patches = image_patch.find("man") + man_patches.sort(key=lambda man: man.horizontal_center) + leftmost_man = man_patches[0] # First from the left + girl_in_white_patches.sort(key=lambda girl: distance(girl, leftmost_man)) + girl_patch = girl_in_white_patches[0] + # Remember: return the girl + return girl_patch + + +# Given a list of images: Is the statement true? There is 1 table that is in front of woman that is wearing jacket. +def execute_command(image_list) -> str: + for image in image_list: + image = ImagePatch(image) + woman_patches = image.find("woman") + for woman in woman_patches: + if woman.simple_query("Is the woman wearing jacket?") == "yes": + tables = woman.find("table") + return bool_to_yesno(len(tables) == 1) + return 'no' + + +# Given an image: Find top left. 
+def execute_command(image) -> ImagePatch: + # Return the person + image_patch = ImagePatch(image) + # Figure out what thing the caption is referring to. We need a subject for every caption + persons = image_patch.find("person") + top_all_objects = max([obj.vertical_center for obj in persons]) + # Select objects that are close to the top + # We do this because the caption is asking first about vertical and then about horizontal + persons_top = [p for p in persons if top_all_objects - p.vertical_center < 100] + if len(persons_top) == 0: + persons_top = persons + # And after that, obtain the leftmost object among them + persons_top.sort(key=lambda obj: obj.horizontal_center) + person_leftmost = persons_top[0] + # Remember: return the person + return person_leftmost + + +# Given an image: What type of weather do you see in the photograph? +def execute_command(image) -> str: + image_patch = ImagePatch(image) + return image_patch.simple_query("What type of weather do you see in the photograph?") + + +# Given an image: How many orange life vests can be seen? +def execute_command(image) -> str: + image_patch = ImagePatch(image) + life_vest_patches = image_patch.find("life vest") + orange_life_vest_patches = [] + for life_vest_patch in life_vest_patches: + if life_vest_patch.verify_property('life vest', 'orange'): + orange_life_vest_patches.append(life_vest_patch) + return str(len(orange_life_vest_patches)) + + +# Given an image: What is behind the pole? +def execute_command(image) -> str: + image_patch = ImagePatch(image) + # contains a relation (around, next to, on, near, on top of, in front of, behind, etc), so ask directly + return image_patch.simple_query("What is behind the pole?") + + +# Given an image: Find second to top flower. 
+def execute_command(image) -> ImagePatch: + # Return the flower + image_patch = ImagePatch(image) + flower_patches = image_patch.find("flower") + flower_patches.sort(key=lambda flower: flower.vertical_center) + flower_patch = flower_patches[-2] + # Remember: return the flower + return flower_patch + + +# Given an image: Find back. +def execute_command(image) -> ImagePatch: + # Return the person + image_patch = ImagePatch(image) + person_patches = image_patch.find("person") + person_patches.sort(key=lambda person: person.compute_depth()) + person_patch = person_patches[-1] + # Remember: return the person + return person_patch + + +# Given an image: Find chair at the front. +def execute_command(image) -> ImagePatch: + # Return the chair + image_patch = ImagePatch(image) + chair_patches = image_patch.find("chair") + chair_patches.sort(key=lambda chair: chair.compute_depth()) + chair_patch = chair_patches[0] + # Remember: return the chair + return chair_patch + + +# Given an image: Find white and yellow pants. +def execute_command(image) -> ImagePatch: + # Return the person + image_patch = ImagePatch(image) + # Clothing always requires returning the person + person_patches = image_patch.find("person") + person_patch = best_image_match(person_patches, ["white pants", "yellow pants"]) + # Remember: return the person + return person_patch + + +# Given an image: Find cow facing the camera. +def execute_command(image) -> ImagePatch: + # Return the cow + image_patch = ImagePatch(image) + cow_patches = image_patch.find("cow") + if len(cow_patches) == 0: + cow_patches = [image_patch] + cow_patch = best_image_match(list_patches=cow_patches, content=["cow facing the camera"]) + # Remember: return the cow + return cow_patch + + +# Given a list of images: Is the statement true? There is 1 image that contains exactly 3 blue papers. 
+def execute_command(image_list) -> str: + image_cnt = 0 + for image in image_list: + image = ImagePatch(image) + paper_patches = image.find("paper") + blue_paper_patches = [] + for paper in paper_patches: + if paper.verify_property("paper", "blue"): + blue_paper_patches.append(paper) + if len(blue_paper_patches) == 3: + image_cnt += 1 + return bool_to_yesno(image_cnt == 1) + + +# Given an image: Find black car just under stop sign. +def execute_command(image) -> ImagePatch: + # Return the car + image_patch = ImagePatch(image) + stop_sign_patches = image_patch.find("stop sign") + if len(stop_sign_patches) == 0: + stop_sign_patches = [image_patch] + stop_sign_patch = stop_sign_patches[0] + car_patches = image_patch.find("black car") + car_under_stop = [] + for car in car_patches: + if car.upper < stop_sign_patch.upper: + car_under_stop.append(car) + # Find car that is closest to the stop sign + car_under_stop.sort(key=lambda car: car.vertical_center - stop_sign_patch.vertical_center) + # Remember: return the car + return car_under_stop[0] + + +# Given a list of images: Is there either a standing man that is holding a cell phone or a sitting man that is holding a cell phone? +def execute_command(image_list) -> str: + for image in image_list: + image = ImagePatch(image) + man_patches = image.find("man") + for man in man_patches: + holding_cell_phone = man.simple_query("Is this man holding a cell phone?") + if holding_cell_phone == "yes": + if man.simple_query("Is this man sitting?") == "yes": + return 'yes' + if man.simple_query("Is this man standing?") == "yes": + return 'yes' + return 'no' + + +# Given a list of images: How many people are running while looking at their cell phone? 
+def execute_command(image) -> str: + image_patch = ImagePatch(image) + people_patches = image_patch.find("person") + # Question assumes only one person patch + if len(people_patches) == 0: + # If no people are found, query the image directly + return image_patch.simple_query("How many people are running while looking at their cell phone?") + people_count = 0 + for person_patch in people_patches: + # Verify two conditions: (1) running (2) looking at cell phone + if person_patch.simple_query("Is the person running?") == "yes": + if person_patch.simple_query("Is the person looking at cell phone?") == "yes": + people_count += 1 + return str(people_count) + + +# Given a list of images: Does the car that is on a highway and the car that is on a street have the same color? +def execute_command(image_list) -> str: + color_1 = None + color_2 = None + for image in image_list: + image = ImagePatch(image) + car_patches = image.find("car") + for car_patch in car_patches: + if car_patch.simple_query("Is the car on the highway?") == "yes": + color_1 = car_patch.simple_query("What is the color of the car?") + elif car_patch.simple_query("Is the car on a street?") == "yes": + color_2 = car_patch.simple_query("What is the color of the car?") + return bool_to_yesno(color_1 == color_2) + + +# Given a list of images: Is the statement true? There are 3 magazine that are on table. 
+def execute_command(image_list) -> str: + count = 0 + for image in image_list: + image = ImagePatch(image) + magazine_patches = image.find("magazine") + for magazine_patch in magazine_patches: + on_table = magazine_patch.simple_query("Is the magazine on a table?") + if on_table == "yes": + count += 1 + return bool_to_yesno(count == 3) + + +# INSERT_QUERY_HERE \ No newline at end of file diff --git a/viper/prompts/benchmarks/nlvr.py b/viper/prompts/benchmarks/nlvr.py new file mode 100644 index 0000000..1854dd9 --- /dev/null +++ b/viper/prompts/benchmarks/nlvr.py @@ -0,0 +1,356 @@ +from typing import List, Union + +from vision_functions import find_in_image, simple_qa, verify_property, best_text_match, compute_depth + + +def bool_to_yesno(bool_answer: bool) -> str: + return "yes" if bool_answer else "no" + + +class ImagePatch: + """A Python class containing a crop of an image centered around a particular object, as well as relevant information. + Attributes + ---------- + cropped_image : array_like + An array-like of the cropped image taken from the original image. + left : int + An int describing the position of the left border of the crop's bounding box in the original image. + lower : int + An int describing the position of the bottom border of the crop's bounding box in the original image. + right : int + An int describing the position of the right border of the crop's bounding box in the original image. + upper : int + An int describing the position of the top border of the crop's bounding box in the original image. + + Methods + ------- + find(object_name: str) -> List[ImagePatch] + Returns a list of new ImagePatch objects containing crops of the image centered around any objects found in the image matching the object_name. + simple_query(question: str=None) -> str + Returns the answer to a basic question asked about the image. If no question is provided, returns the answer to "What is this?". 
+ exists(object_name: str) -> bool + Returns True if the object specified by object_name is found in the image, and False otherwise. + verify_property(property: str) -> bool + Returns True if the property is met, and False otherwise. + compute_depth()->float + Returns the median depth of the image crop. + best_text_match(string1: str, string2: str) -> str + Returns the string that best matches the image. + crop(left: int, lower: int, right: int, upper: int) -> ImagePatch + Returns a new ImagePatch object containing a crop of the image at the given coordinates. + """ + + def __init__(self, image, left: int = None, lower: int = None, right: int = None, upper: int = None): + """Initializes an ImagePatch object by cropping the image at the given coordinates and stores the coordinates as attributes. + If no coordinates are provided, the image is left unmodified, and the coordinates are set to the dimensions of the image. + Parameters + ------- + image : array_like + An array-like of the original image. + left : int + An int describing the position of the left border of the crop's bounding box in the original image. + lower : int + An int describing the position of the bottom border of the crop's bounding box in the original image. + right : int + An int describing the position of the right border of the crop's bounding box in the original image. + upper : int + An int describing the position of the top border of the crop's bounding box in the original image. 
+ """ + if left is None and right is None and upper is None and lower is None: + self.cropped_image = image + self.left = 0 + self.lower = 0 + self.right = image.shape[2] # width + self.upper = image.shape[1] # height + else: + self.cropped_image = image[:, lower:upper, left:right] + self.left = left + self.upper = upper + self.right = right + self.lower = lower + + self.width = self.cropped_image.shape[2] + self.height = self.cropped_image.shape[1] + + self.horizontal_center = (self.left + self.right) / 2 + self.vertical_center = (self.lower + self.upper) / 2 + + def find(self, object_name: str) -> List["ImagePatch"]: + """Returns a new ImagePatch object containing the crop of the image centered around the object specified by object_name. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + + Examples + -------- + >>> # Given an image: Find the foo. + >>> def execute_command(image) -> List[ImagePatch]: + >>> image_patch = ImagePatch(image) + >>> foo_patches = image_patch.find("foo") + >>> return foo_patches + """ + return find_in_image(self.cropped_image, object_name) + + def simple_query(self, question: str = None) -> str: + """Returns the answer to a basic question asked about the image. If no question is provided, returns the answer to "What is this?". + Parameters + ------- + question : str + A string describing the question to be asked. + + Examples + ------- + >>> # Given an image: Which kind of animal is not eating? 
+ >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> animal_patches = image_patch.find("animal") + >>> for animal_patch in animal_patches: + >>> if not animal_patch.verify_property("animal", "eating"): + >>> return animal_patch.simple_query("What kind of animal is eating?") # crop would include eating so keep it in the query + >>> # If no animal is not eating, query the image directly + >>> return image_patch.simple_query("Which kind of animal is not eating?") + + >>> # Given an image: What is in front of the horse? + >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> # contains a relation (around, next to, on, near, on top of, in front of, behind, etc), so ask directly + >>> return image_patch.simple_query("What is in front of the horse?") + """ + return simple_qa(self.cropped_image, question) + + def exists(self, object_name: str) -> bool: + """Returns True if the object specified by object_name is found in the image, and False otherwise. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + + Examples + ------- + >>> # Given an image: Are there both cakes and gummy bears in the photo? + >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> is_cake = image_patch.exists("cake") + >>> is_gummy_bear = image_patch.exists("gummy bear") + >>> return bool_to_yesno(is_cake and is_gummy_bear) + """ + return len(self.find(object_name)) > 0 + + def verify_property(self, object_name: str, property: str) -> bool: + """Returns True if the object possesses the property, and False otherwise. + Differs from 'exists' in that it presupposes the existence of the object specified by object_name, instead checking whether the object possesses the property. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + property : str + A string describing the property to be checked. 
+ + Examples + ------- + >>> # Given an image: Do the letters have blue color? + >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> letters_patches = image_patch.find("letters") + >>> # Question assumes only one letter patch + >>> if len(letters_patches) == 0: + >>> # If no letters are found, query the image directly + >>> return image_patch.simple_query("Do the letters have blue color?") + >>> return bool_to_yesno(letters_patches[0].verify_property("letters", "blue")) + """ + return verify_property(self.cropped_image, object_name, property) + + def compute_depth(self): + """Returns the median depth of the image crop + Parameters + ---------- + Returns + ------- + float + the median depth of the image crop + + Examples + -------- + >>> # Given an image: Find the bar furthest away. + >>> def execute_command(image)->ImagePatch: + >>> image_patch = ImagePatch(image) + >>> bar_patches = image_patch.find("bar") + >>> bar_patches.sort(key=lambda bar: bar.compute_depth()) + >>> return bar_patches[-1] + """ + depth_map = compute_depth(self.cropped_image) + return depth_map.median() + + def best_text_match(self, option_list: List[str]) -> str: + """Returns the string that best matches the image. + Parameters + ------- + option_list : str + A list with the names of the different options + prefix : str + A string with the prefixes to append to the options + + Examples + ------- + >>> # Given an image: Is the cap gold or white? 
+ >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> cap_patches = image_patch.find("cap") + >>> # Question assumes one cap patch + >>> if len(cap_patches) == 0: + >>> # If no cap is found, query the image directly + >>> return image_patch.simple_query("Is the cap gold or white?") + >>> return cap_patches[0].best_text_match(["gold", "white"]) + """ + return best_text_match(self.cropped_image, option_list) + + def crop(self, left: int, lower: int, right: int, upper: int) -> "ImagePatch": + """Returns a new ImagePatch cropped from the current ImagePatch. + Parameters + ------- + left : int + The leftmost pixel of the cropped image. + lower : int + The lowest pixel of the cropped image. + right : int + The rightmost pixel of the cropped image. + upper : int + The uppermost pixel of the cropped image. + ------- + """ + return ImagePatch(self.cropped_image, left, lower, right, upper) + + +def best_image_match(list_patches: List[ImagePatch], content: List[str], return_index=False) -> Union[ImagePatch, int]: + """Returns the patch most likely to contain the content. + Parameters + ---------- + list_patches : List[ImagePatch] + content : List[str] + the object of interest + return_index : bool + if True, returns the index of the patch most likely to contain the object + + Returns + ------- + int + Patch most likely to contain the object + """ + return best_image_match(list_patches, content, return_index) + + +def distance(patch_a: ImagePatch, patch_b: ImagePatch) -> float: + """ + Returns the distance between the edges of two ImagePatches. If the patches overlap, it returns a negative distance + corresponding to the negative intersection over union. 
+ + Parameters + ---------- + patch_a : ImagePatch + patch_b : ImagePatch + + Examples + -------- + # Return the qux that is closest to the foo + >>> def execute_command(image): + >>> image_patch = ImagePatch(image) + >>> qux_patches = image_patch.find('qux') + >>> foo_patches = image_patch.find('foo') + >>> foo_patch = foo_patches[0] + >>> qux_patches.sort(key=lambda x: distance(x, foo_patch)) + >>> return qux_patches[0] + """ + return distance(patch_a, patch_b) + + +# Examples of using ImagePatch + + +# Given two images, one on the left and one on the right: Is the statement true? A person is modeling the mittens in the image on the right. +def execute_command(image_dict) -> str: + image_patch = ImagePatch(image_dict['right']) + return image_patch.simple_query("Is there a person modeling the mittens?") + + +# Given two images, one on the left and one on the right: Is the statement true? One image contains exactly three devices, and the other image features one central device with its screen open to nearly 90-degrees. +def execute_command(image_dict) -> str: + for image_first, image_second in [[image_dict['left'], image_dict['right']], + [image_dict['right'], image_dict['left']]]: + image_first = ImagePatch(image_first) + image_second = ImagePatch(image_second) + first_device_patches = image_first.find('device') + second_device_patches = image_second.find('device') + if len(first_device_patches) == 3 and len(second_device_patches) == 1: + answer = image_second.simple_query("Is the device's screen open to nearly 90-degrees?") + if answer == "yes": + return "yes" + return "no" + + +# Given two images, one on the left and one on the right: Is the statement true? Each image includes at least one soda bottle shaped gummy candy, with a brown bottom half and clear top half, and no gummy soda bottles are in wrappers. 
+def execute_command(image_dict) -> str: + for image in image_dict.values(): + image = ImagePatch(image) + gummy_candy_patches = image.find('gummy candy') + count = 0 + for gummy_candy_patch in gummy_candy_patches: + if gummy_candy_patch.simple_query("Does the shape of gummy candy look like a soda bottle?") == "yes": + if gummy_candy_patch.simple_query("Is the gummy candy in wrappers?") == "yes": + return "no" + if gummy_candy_patch.simple_query("Is the top half clear?") == "yes": + if gummy_candy_patch.simple_query("Is the bottom half brown?") == "yes": + count += 1 + if count == 0: + return "no" + return "yes" + + +# Given two images, one on the left and one on the right: Is the statement true? The left image shows a group of no more than five people, including at least three women, sitting on something while looking at their phones. +def execute_command(image_dict) -> str: + image_patch = ImagePatch(image_dict['left']) + people_patches = image_patch.find('people') + if len(people_patches) <= 5: + count = 0 + for person_patch in people_patches: + if person_patch.simple_query("Is this a woman?") == "yes": + if person_patch.simple_query("Is the person sitting?") == "yes": + if person_patch.simple_query("Is the person looking at the phone?") == "yes": + count += 1 + if count >= 3: + return 'yes' + return 'no' + + +# Given two images, one on the left and one on the right: Is the statement true? There is exactly one lid. +def execute_command(image_dict) -> str: + lid_patches = [] + for image_patch in image_dict.values(): + image_patch = ImagePatch(image_patch) + lid_patches += image_patch.find('lid') + return bool_to_yesno(len(lid_patches) == 1) + + +# Given two images, one on the left and one on the right: Is the statement true? A person is holding a syringe. 
+def execute_command(image_dict) -> str:
+    for image in image_dict.values():
+        person_patches = ImagePatch(image).find('person')
+        for person_patch in person_patches:
+            if person_patch.simple_query("Is the person holding a syringe?") == "yes":
+                return "yes"
+    return "no"
+
+
+# Given two images, one on the left and one on the right: Is the statement true? Only two zebras have their heads up.
+def execute_command(image_dict) -> str:
+    count = 0
+    for image_patch in image_dict.values():
+        image_patch = ImagePatch(image_patch)
+        zebra_patches = image_patch.find('zebra')
+        for zebra_patch in zebra_patches:
+            if zebra_patch.simple_query("Is the zebra's head up?") == "yes":
+                count += 1
+    return bool_to_yesno(count == 2)
+
+
+# INSERT_QUERY_HERE
diff --git a/viper/prompts/benchmarks/refcoco.prompt b/viper/prompts/benchmarks/refcoco.prompt
new file mode 100644
index 0000000..a0ee92f
--- /dev/null
+++ b/viper/prompts/benchmarks/refcoco.prompt
@@ -0,0 +1,455 @@
+import math
+
+
+class ImagePatch:
+    """A Python class containing a crop of an image centered around a particular object, as well as relevant information.
+    Attributes
+    ----------
+    cropped_image : array_like
+        An array-like of the cropped image taken from the original image.
+    left, lower, right, upper : int
+        An int describing the position of the (left/lower/right/upper) border of the crop's bounding box in the original image.
+
+    Methods
+    -------
+    find(object_name: str)->List[ImagePatch]
+        Returns a list of new ImagePatch objects containing crops of the image centered around any objects found in the
+        image matching the object_name.
+    exists(object_name: str)->bool
+        Returns True if the object specified by object_name is found in the image, and False otherwise.
+    verify_property(property: str)->bool
+        Returns True if the property is met, and False otherwise.
+    compute_depth()->float
+        Returns the median depth of the image crop.
+ crop(left: int, lower: int, right: int, upper: int)->ImagePatch + Returns a new ImagePatch object containing a crop of the image at the given coordinates. + """ + + def __init__(self, image, left: int = None, lower: int = None, right: int = None, upper: int = None): + """Initializes an ImagePatch object by cropping the image at the given coordinates and stores the coordinates as + attributes. If no coordinates are provided, the image is left unmodified, and the coordinates are set to the + dimensions of the image. + Parameters + ------- + image : array_like + An array-like of the original image. + left, lower, right, upper : int + An int describing the position of the (left/lower/right/upper) border of the crop's bounding box in the original image. + """ + if left is None and right is None and upper is None and lower is None: + self.cropped_image = image + self.left = 0 + self.lower = 0 + self.right = image.shape[2] # width + self.upper = image.shape[1] # height + else: + self.cropped_image = image[:, lower:upper, left:right] + self.left = left + self.upper = upper + self.right = right + self.lower = lower + + self.width = self.cropped_image.shape[2] + self.height = self.cropped_image.shape[1] + + self.horizontal_center = (self.left + self.right) / 2 + self.vertical_center = (self.lower + self.upper) / 2 + + def find(self, object_name: str) -> List[ImagePatch]: + """Returns a list of ImagePatch objects matching object_name contained in the crop if any are found. + Otherwise, returns an empty list. 
+ Parameters + ---------- + object_name : str + the name of the object to be found + + Returns + ------- + List[ImagePatch] + a list of ImagePatch objects matching object_name contained in the crop + + Examples + -------- + >>> # return the foo + >>> def execute_command(image) -> List[ImagePatch]: + >>> image_patch = ImagePatch(image) + >>> foo_patches = image_patch.find("foo") + >>> return foo_patches + """ + return find_in_image(self.cropped_image, object_name) + + def exists(self, object_name: str) -> bool: + """Returns True if the object specified by object_name is found in the image, and False otherwise. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + + Examples + ------- + >>> # Are there both foos and garply bars in the photo? + >>> def execute_command(image)->str: + >>> image_patch = ImagePatch(image) + >>> is_foo = image_patch.exists("foo") + >>> is_garply_bar = image_patch.exists("garply bar") + >>> return bool_to_yesno(is_foo and is_garply_bar) + """ + return len(self.find(object_name)) > 0 + + def verify_property(self, object_name: str, property: str) -> bool: + """Returns True if the object possesses the property, and False otherwise. + Differs from 'exists' in that it presupposes the existence of the object specified by object_name, instead checking whether the object possesses the property. + Parameters + ------- + object_name : str + A string describing the name of the object to be found in the image. + property : str + A string describing the property to be checked. + + Examples + ------- + >>> # Do the letters have blue color? 
+ >>> def execute_command(image) -> str: + >>> image_patch = ImagePatch(image) + >>> letters_patches = image_patch.find("letters") + >>> # Question assumes only one letter patch + >>> return bool_to_yesno(letters_patches[0].verify_property("letters", "blue")) + """ + return verify_property(self.cropped_image, object_name, property) + + def compute_depth(self): + """Returns the median depth of the image crop + Parameters + ---------- + Returns + ------- + float + the median depth of the image crop + + Examples + -------- + >>> # the bar furthest away + >>> def execute_command(image)->ImagePatch: + >>> image_patch = ImagePatch(image) + >>> bar_patches = image_patch.find("bar") + >>> bar_patches.sort(key=lambda bar: bar.compute_depth()) + >>> return bar_patches[-1] + """ + depth_map = compute_depth(self.cropped_image) + return depth_map.median() + + def crop(self, left: int, lower: int, right: int, upper: int) -> ImagePatch: + """Returns a new ImagePatch cropped from the current ImagePatch. + Parameters + ------- + left, lower, right, upper : int + The (left/lower/right/upper)most pixel of the cropped image. + ------- + """ + return ImagePatch(self.cropped_image, left, lower, right, upper) + + def overlaps_with(self, left, lower, right, upper): + """Returns True if a crop with the given coordinates overlaps with this one, + else False. 
+        Parameters
+        ----------
+        left, lower, right, upper : int
+            the (left/lower/right/upper) border of the crop to be checked
+
+        Returns
+        -------
+        bool
+            True if a crop with the given coordinates overlaps with this one, else False
+
+        Examples
+        --------
+        >>> # black foo on top of the qux
+        >>> def execute_command(image) -> ImagePatch:
+        >>>     image_patch = ImagePatch(image)
+        >>>     qux_patches = image_patch.find("qux")
+        >>>     qux_patch = qux_patches[0]
+        >>>     foo_patches = image_patch.find("black foo")
+        >>>     for foo in foo_patches:
+        >>>         if foo.vertical_center > qux_patch.vertical_center:
+        >>>             return foo
+        """
+        return self.left <= right and self.right >= left and self.lower <= upper and self.upper >= lower
+
+
+def best_image_match(list_patches: List[ImagePatch], content: List[str], return_index=False) -> Union[ImagePatch, int]:
+    """Returns the patch most likely to contain the content.
+    Parameters
+    ----------
+    list_patches : List[ImagePatch]
+    content : List[str]
+        the object of interest
+    return_index : bool
+        if True, returns the index of the patch most likely to contain the object
+
+    Returns
+    -------
+    int
+        Patch most likely to contain the object
+    """
+    return best_image_match(list_patches, content, return_index)
+
+
+def distance(patch_a: ImagePatch, patch_b: ImagePatch) -> float:
+    """
+    Returns the distance between the edges of two ImagePatches. If the patches overlap, it returns a negative distance
+    corresponding to the negative intersection over union.
+ + Parameters + ---------- + patch_a : ImagePatch + patch_b : ImagePatch + + Examples + -------- + # Return the qux that is closest to the foo + >>> def execute_command(image): + >>> image_patch = ImagePatch(image) + >>> qux_patches = image_patch.find('qux') + >>> foo_patches = image_patch.find('foo') + >>> foo_patch = foo_patches[0] + >>> qux_patches.sort(key=lambda x: distance(x, foo_patch)) + >>> return qux_patches[0] + """ + return distance(patch_a, patch_b) + + +# Examples of how to use the API + +# chair at the front +def execute_command(image) -> ImagePatch: + # Return the chair + image_patch = ImagePatch(image) + chair_patches = image_patch.find("chair") + chair_patches.sort(key=lambda chair: chair.compute_depth()) + chair_patch = chair_patches[0] + # Remember: return the chair + return chair_patch + + +# black car just under stop sign +def execute_command(image) -> ImagePatch: + # Return the car + image_patch = ImagePatch(image) + stop_sign_patches = image_patch.find("stop sign") + if len(stop_sign_patches) == 0: + stop_sign_patches = [image_patch] + stop_sign_patch = stop_sign_patches[0] + car_patches = image_patch.find("black car") + car_under_stop = [] + for car in car_patches: + if car.upper < stop_sign_patch.upper: + car_under_stop.append(car) + # Find car that is closest to the stop sign + car_under_stop.sort(key=lambda car: car.vertical_center - stop_sign_patch.vertical_center) + # Remember: return the car + return car_under_stop[0] + + +# middle kid +def execute_command(image) -> ImagePatch: + # Return the kid + image_patch = ImagePatch(image) + kid_patches = image_patch.find("kid") + if len(kid_patches) == 0: + kid_patches = [image_patch] + kid_patches.sort(key=lambda kid: kid.horizontal_center) + # Remember: return the kid + return kid_patches[len(kid_patches) // 2] # Return the middle kid + + +# girl in white next to man in left +def execute_command(image) -> ImagePatch: + # Return the girl + image_patch = ImagePatch(image) + girl_patches = 
image_patch.find("girl") + girl_in_white_patches = [g for g in girl_patches if g.verify_property("girl", "white clothing")] + if len(girl_in_white_patches) == 0: + girl_in_white_patches = girl_patches + man_patches = image_patch.find("man") + man_patches.sort(key=lambda man: man.horizontal_center) + leftmost_man = man_patches[0] # First from the left + girl_in_white_patches.sort(key=lambda girl: distance(girl, leftmost_man)) + girl_patch = girl_in_white_patches[0] + # Remember: return the girl + return girl_patch + + +# cow facing the camera +def execute_command(image) -> ImagePatch: + # Return the cow + image_patch = ImagePatch(image) + cow_patches = image_patch.find("cow") + if len(cow_patches) == 0: + cow_patches = [image_patch] + cow_patch = best_image_match(list_patches=cow_patches, content=["cow facing the camera"]) + # Remember: return the cow + return cow_patch + + +# back +def execute_command(image) -> ImagePatch: + # Return the person + image_patch = ImagePatch(image) + person_patches = image_patch.find("person") + person_patches.sort(key=lambda person: person.compute_depth()) + person_patch = person_patches[-1] + # Remember: return the person + return person_patch + + +# dog to the left of the post who is closest to girl wearing a shirt with text that says "I love you" +def execute_command(image) -> ImagePatch: + # Return the dog + image_patch = ImagePatch(image) + shirt_patches = image_patch.find("shirt") + if len(shirt_patches) == 0: + shirt_patches = [image_patch] + shirt_patch = best_image_match(list_patches=shirt_patches, content=["I love you shirt"]) + post_patches = image_patch.find("post") + post_patches.sort(key=lambda post: distance(post, shirt_patch)) + post_patch = post_patches[0] + dog_patches = image_patch.find("dog") + dogs_left_patch = [dog for dog in dog_patches if dog.left < post_patch.left] + if len(dogs_left_patch) == 0: + dogs_left_patch = dog_patches + dogs_left_patch.sort(key=lambda dog: distance(dog, post_patch)) + dog_patch = 
dogs_left_patch[0]
+    # Remember: return the dog
+    return dog_patch
+
+
+# more visible chair
+def execute_command(image) -> ImagePatch:
+    # Return the chair
+    image_patch = ImagePatch(image)
+    # Remember: return the chair
+    return image_patch.find("chair")[0]
+
+
+# top left
+def execute_command(image) -> ImagePatch:
+    # Return the person
+    image_patch = ImagePatch(image)
+    # Figure out what thing the caption is referring to. We need a subject for every caption
+    persons = image_patch.find("person")
+    top_all_objects = max([obj.vertical_center for obj in persons])
+    # Select objects that are close to the top
+    # We do this because the caption is asking first about vertical and then about horizontal
+    persons_top = [p for p in persons if distance(p.vertical_center, top_all_objects) < 100]
+    if len(persons_top) == 0:
+        persons_top = persons
+    # And after that, obtain the leftmost object among them
+    persons_top.sort(key=lambda obj: obj.horizontal_center)
+    person_leftmost = persons_top[0]
+    # Remember: return the person
+    return person_leftmost
+
+
+# second to top flower
+def execute_command(image) -> ImagePatch:
+    # Return the flower
+    image_patch = ImagePatch(image)
+    flower_patches = image_patch.find("flower")
+    flower_patches.sort(key=lambda flower: flower.vertical_center)
+    flower_patch = flower_patches[-2]
+    # Remember: return the flower
+    return flower_patch
+
+
+# chair to the right near the couch
+def execute_command(image)->ImagePatch:
+    # Return the chair
+    image_patch = ImagePatch(image)
+    chair_patches = image_patch.find("chair")
+    if len(chair_patches) == 0:
+        chair_patches = [image_patch]
+    elif len(chair_patches) == 1:
+        return chair_patches[0]
+    chair_patches_right = [c for c in chair_patches if c.horizontal_center > image_patch.horizontal_center]
+    couch_patches = image_patch.find("couch")
+    if len(couch_patches) == 0:
+        couch_patches = [image_patch]
+    couch_patch = couch_patches[0]
+    chair_patches_right.sort(key=lambda c: distance(c, couch_patch))
+ chair_patch = chair_patches_right[0] + # Remember: return the chair + return chair_patch + + +# number 17 +def execute_command(image) -> ImagePatch: + # Return the person + image_patch = ImagePatch(image) + person_patches = image_patch.find("person") + for patch in person_patches: + if patch.exists("17"): + return patch + # Remember: return the person + return person_patches[0] + + +# balloon on the right and second from the bottom +def execute_command(image) -> ImagePatch: + # Return the balloon + image_patch = ImagePatch(image) + balloon_patches = image_patch.find("balloon") + if len(balloon_patches) == 0: + balloon_patches = [image_patch] + elif len(balloon_patches) == 1: + return balloon_patches[0] + leftmost_coordinate = min([patch.horizontal_center for patch in balloon_patches]) + balloon_patches_right = [patch for patch in balloon_patches if + distance(patch.horizontal_center, leftmost_coordinate) < 100] + if len(balloon_patches_right) == 0: + balloon_patches_right = balloon_patches + balloon_patches_right.sort(key=lambda p: p.vertical_center) + balloon_patch = balloon_patches_right[1] + # Remember: return the balloon + return balloon_patch + + +# lamp on the bottom +def execute_command(image) -> ImagePatch: + # Return the lamp + image_patch = ImagePatch(image) + lamp_patches = image_patch.find("lamp") + lamp_patches.sort(key=lambda lamp: lamp.vertical_center) + # Remember: return the lamp + return lamp_patches[0] # Return the bottommost lamp + + +# foo to bottom left +def execute_command(image) -> ImagePatch: + # Return the foo + image_patch = ImagePatch(image) + foo_patches = image_patch.find("foo") + lowermost_coordinate = min([patch.vertical_center for patch in foo_patches]) + foo_patches_bottom = [patch for patch in foo_patches if distance(patch.vertical_center, lowermost_coordinate) < 100] + if len(foo_patches_bottom) == 0: + foo_patches_bottom = foo_patches + elif len(foo_patches_bottom) == 1: + return foo_patches_bottom[0] + 
foo_patches_bottom.sort(key=lambda foo: foo.horizontal_center) + foo_patch = foo_patches_bottom[0] + # Remember: return the foo + return foo_patch + + +# white and yellow pants +def execute_command(image) -> ImagePatch: + # Return the person + image_patch = ImagePatch(image) + # Clothing always requires returning the person + person_patches = image_patch.find("person") + person_patch = best_image_match(person_patches, ["white pants", "yellow pants"]) + # Remember: return the person + return person_patch + + +# INSERT_QUERY_HERE +def execute_command(INSERT_TYPE_HERE): \ No newline at end of file diff --git a/viper/prompts/fixed_code/blip2.prompt b/viper/prompts/fixed_code/blip2.prompt new file mode 100644 index 0000000..a6419fa --- /dev/null +++ b/viper/prompts/fixed_code/blip2.prompt @@ -0,0 +1,3 @@ + + image_patch = ImagePatch(image) + return image_patch.simple_query(query) \ No newline at end of file diff --git a/viper/prompts/fixed_code/blip2_covr.prompt b/viper/prompts/fixed_code/blip2_covr.prompt new file mode 100644 index 0000000..2dfca0f --- /dev/null +++ b/viper/prompts/fixed_code/blip2_covr.prompt @@ -0,0 +1,17 @@ + + from torchvision import transforms + import numpy as np + import torch + resize_transform = transforms.Compose([ + transforms.ToPILImage(), + transforms.Resize((500, 500)), + transforms.ToTensor() + ]) + n = int(np.floor(np.sqrt(len(image_list)))) + m = len(image_list) // n + image_list = [resize_transform(im) for im in image_list] + image_list += [torch.ones((3, 500, 500), dtype=image_list[0].dtype) for _ in range(m * n - len(image_list))] + image_list = [torch.cat(image_list[i * m : (i + 1) * m], dim=2) for i in range(n)] + image_list = torch.cat(image_list, dim=1) + image_patch = ImagePatch(image_list) + return image_patch.simple_query(query) \ No newline at end of file diff --git a/viper/prompts/fixed_code/blip2_nlvr.prompt b/viper/prompts/fixed_code/blip2_nlvr.prompt new file mode 100644 index 0000000..dc25c8d --- /dev/null +++ 
b/viper/prompts/fixed_code/blip2_nlvr.prompt @@ -0,0 +1,11 @@ + + from torchvision import transforms + import torch + resize_transform = transforms.Compose([ + transforms.ToPILImage(), + transforms.Resize((500, 500)), + transforms.ToTensor() + ]) + image = torch.cat([resize_transform(image_dict['left']), resize_transform(image_dict['right']), ], dim=2) + image_patch = ImagePatch(image) + return image_patch.simple_query(query) \ No newline at end of file diff --git a/viper/prompts/fixed_code/glip.prompt b/viper/prompts/fixed_code/glip.prompt new file mode 100644 index 0000000..90bb338 --- /dev/null +++ b/viper/prompts/fixed_code/glip.prompt @@ -0,0 +1,4 @@ + + image_patch = ImagePatch(image) + bbox = image_patch.forward('glip', image_patch.cropped_image, query)[0][0] + return image_patch.crop(*bbox) \ No newline at end of file diff --git a/viper/setup.sh b/viper/setup.sh new file mode 100644 index 0000000..dd55dac --- /dev/null +++ b/viper/setup.sh @@ -0,0 +1,4 @@ +# create environment +bash setup_env.sh +# download models +bash download_models.sh \ No newline at end of file diff --git a/viper/setup_env.sh b/viper/setup_env.sh new file mode 100644 index 0000000..bd688be --- /dev/null +++ b/viper/setup_env.sh @@ -0,0 +1,4 @@ +conda create -n vipergpt python=3.10 +conda activate vipergpt +conda install pytorch==1.13.1 torchvision==0.14.1 torchaudio==0.13.1 pytorch-cuda=11.6 -c pytorch -c nvidia +pip install -r requirements.txt \ No newline at end of file diff --git a/viper/utils.py b/viper/utils.py new file mode 100644 index 0000000..eb6ac32 --- /dev/null +++ b/viper/utils.py @@ -0,0 +1,205 @@ +import json +import matplotlib.pyplot as plt +import numpy as np +import os +import pandas as pd +import pathlib +import random +import sys +import time +import torch +from PIL import Image +from torchvision import transforms +from torchvision.utils import draw_bounding_boxes as tv_draw_bounding_boxes +from torchvision.utils import make_grid +from typing import Union + 
def is_interactive() -> bool:
    """Return True when running inside an IPython shell / Jupyter notebook.

    Fix: `from IPython import get_ipython` raises ImportError (not NameError)
    when IPython is not installed, so the original except clause let the
    exception escape on a plain interpreter without IPython.
    """
    try:
        from IPython import get_ipython
        return get_ipython() is not None
    except (ImportError, NameError):
        return False  # Probably standard Python interpreter
def draw_bounding_boxes(
        image: Union[torch.Tensor, Image.Image],
        bboxes: Union[list, torch.Tensor],
        width: int = 5,
        **kwargs
):
    """
    Wrapper around torchvision.utils.draw_bounding_boxes.
    bboxes: [xmin, ymin, xmax, ymax], with y measured from the bottom of the
    image (flipped to torchvision's top-origin convention before drawing).
    :return: uint8 image tensor with the boxes drawn
    """
    # Fix: the original nested `type(image) == Image.Image` check skipped the
    # tensor conversion for PIL.Image subclasses, which then crashed on the
    # arithmetic below. isinstance alone is the correct test.
    if isinstance(image, Image.Image):
        image = transforms.ToTensor()(image)
    if isinstance(bboxes, list):
        bboxes = torch.tensor(bboxes)

    image = (image * 255).to(torch.uint8).cpu()
    height = image.shape[1]
    # Convert from bottom-origin to torchvision's top-origin y coordinates.
    bboxes = torch.stack([bboxes[:, 0], height - bboxes[:, 3], bboxes[:, 2], height - bboxes[:, 1]], dim=1)
    return tv_draw_bounding_boxes(image, bboxes, width=width, **kwargs)
class HiddenPrints:
    """Context manager that silences stdout/stderr (and tqdm) while loading models.

    Suppression only happens when the class attribute `hide_prints` is True;
    otherwise entering/exiting is a no-op.
    """
    hide_prints = False

    def __init__(self, model_name=None, console=None, use_newline=True):
        # model_name: optional label printed around the silenced section via `console`.
        self.model_name = model_name
        self.console = console
        self.use_newline = use_newline
        self.tqdm_aux = None  # saved tqdm.tqdm so it can be restored on exit

    def __enter__(self):
        if self.hide_prints:
            import tqdm  # We need to do an extra step to hide tqdm outputs. Does not work in Jupyter Notebooks.

            def nop(it, *a, **k):
                return it

            self.tqdm_aux = tqdm.tqdm
            tqdm.tqdm = nop

            if self.model_name is not None:
                self.console.print(f'Loading {self.model_name}...')
            self._original_stdout = sys.stdout
            self._original_stderr = sys.stderr
            sys.stdout = open(os.devnull, 'w')
            # May not be what we always want, but some annoying warnings end up to stderr
            sys.stderr = open(os.devnull, 'w')

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.hide_prints:
            sys.stdout.close()
            # Fix: also close the devnull handle opened for stderr (it leaked).
            sys.stderr.close()
            sys.stdout = self._original_stdout
            # Fix: the original assigned the saved stderr to sys.stdout, which
            # clobbered stdout and left sys.stderr pointing at os.devnull forever.
            sys.stderr = self._original_stderr
            if self.model_name is not None:
                self.console.print(f'{self.model_name} loaded ')
            import tqdm
            tqdm.tqdm = self.tqdm_aux
class VideoSegment:
    """A set of frames from a video, represented as ImagePatch objects on demand.

    Attributes
    ----------
    trimmed_video : torch.Tensor
        The (possibly trimmed) frames belonging to this segment.
    start, end : int
        Frame indices of this segment with respect to the original video.
    num_frames : int
        Number of frames in this segment.
    """

    def __init__(self, video: torch.Tensor, start: int = None, end: int = None, parent_start=0, queues=None):
        """Trim `video` to the [start, end) slice and remember absolute frame offsets.

        When both `start` and `end` are None, the video is kept unmodified and
        the bounds are set to the beginning and end of the video.
        """
        if start is None and end is None:
            self.trimmed_video = video
            self.start = 0
            self.end = video.shape[0]  # duration
        else:
            self.trimmed_video = video[start:end]
            self.start = (0 if start is None else start) + parent_start
            self.end = (video.shape[0] if end is None else end) + parent_start

        self.num_frames = self.trimmed_video.shape[0]
        self.cache = {}
        self.queues = (None, None) if queues is None else queues

        if self.trimmed_video.shape[0] == 0:
            raise Exception("VideoSegment has duration=0")

    def forward(self, model_name, *args, **kwargs):
        # Route model calls through the shared process queues.
        return forward(model_name, *args, queues=self.queues, **kwargs)

    def frame_from_index(self, index) -> ImagePatch:
        """Return the frame at position `index` as an ImagePatch.
        Out-of-range indices clamp to the last frame."""
        frame = self.trimmed_video[index] if index < self.num_frames else self.trimmed_video[-1]
        return ImagePatch(frame, queues=self.queues)

    def trim(self, start: Union[int, None] = None, end: Union[int, None] = None) -> VideoSegment:
        """Return a new VideoSegment over [start, end) of this segment, clamped to bounds."""
        if start is not None:
            start = max(start, 0)
        if end is not None:
            end = min(end, self.num_frames)
        return VideoSegment(self.trimmed_video, start, end, self.start, queues=self.queues)

    def select_answer(self, info: dict, question: str, options=None) -> str:
        """Ask the text model to pick an answer given the gathered `info` about the video."""
        def format_dict(x):
            # Render nested dicts as indented bullet lists.
            if isinstance(x, dict):
                x = ''.join([f'\n\t- {k}: {format_dict(v)}' for k, v in x.items()])
            return x

        with open(config.select_answer_prompt, 'r') as f:
            prompt = f.read()
        info_formatting = '\n'.join([f"- {k}: {format_dict(v)}" for k, v in info.items()])
        prompt = prompt.format(info=info_formatting, question=question, options=options)
        return self.forward('gpt3_general', prompt).strip()

    def frame_iterator(self) -> Iterator[ImagePatch]:
        """Yield each frame of the segment as an ImagePatch."""
        for i in range(self.num_frames):
            yield ImagePatch(self.trimmed_video[i], queues=self.queues)

    def __repr__(self):
        return "VideoSegment({}, {})".format(self.start, self.end)
+""" + +import abc +import contextlib +import os +import re +import timeit +import warnings +from collections import Counter +from functools import partial +from itertools import chain +from typing import List, Union + +import backoff +import openai +import torch +import torchvision +from PIL import Image +from joblib import Memory +from rich.console import Console +from torch import hub +from torch.nn import functional as F +from torchvision import transforms + +from configs import config +from utils import HiddenPrints + +with open('api.key') as f: + openai.api_key = f.read().strip() + +cache = Memory('cache/' if config.use_cache else None, verbose=0) +device = "cuda" if torch.cuda.is_available() else "cpu" +console = Console(highlight=False) +HiddenPrints = partial(HiddenPrints, console=console, use_newline=config.multiprocessing) + + +# --------------------------- Base abstract model --------------------------- # + +class BaseModel(abc.ABC): + to_batch = False + seconds_collect_data = 1.5 # Window of seconds to group inputs, if to_batch is True + max_batch_size = 10 # Maximum batch size, if to_batch is True. Maximum allowed by OpenAI + requires_gpu = True + num_gpus = 1 # Number of required GPUs + load_order = 0 # Order in which the model is loaded. Lower is first. By default, models are loaded alphabetically + + def __init__(self, gpu_number): + self.dev = f'cuda:{gpu_number}' if device == 'cuda' else device + + @abc.abstractmethod + def forward(self, *args, **kwargs): + """ + If to_batch is True, every arg and kwarg will be a list of inputs, and the output should be a list of outputs. + The way it is implemented in the background, if inputs with defaults are not specified, they will take the + default value, but still be given as a list to the forward method. 
+ """ + pass + + @classmethod + @abc.abstractmethod + def name(cls) -> str: + """The name of the model has to be given by the subclass""" + pass + + @classmethod + def list_processes(cls): + """ + A single model can be run in multiple processes, for example if there are different tasks to be done with it. + If multiple processes are used, override this method to return a list of strings. + Remember the @classmethod decorator. + If we specify a list of processes, the self.forward() method has to have a "process_name" parameter that gets + automatically passed in. + See GPT3Model for an example. + """ + return [cls.name] + + +# ------------------------------ Specific models ---------------------------- # + + +class ObjectDetector(BaseModel): + name = 'object_detector' + + def __init__(self, gpu_number=0): + super().__init__(gpu_number) + + with HiddenPrints('ObjectDetector'): + detection_model = hub.load('facebookresearch/detr', 'detr_resnet50', pretrained=True).to(self.dev) + detection_model.eval() + + self.detection_model = detection_model + + @torch.no_grad() + def forward(self, image: torch.Tensor): + """get_object_detection_bboxes""" + input_batch = image.to(self.dev).unsqueeze(0) # create a mini-batch as expected by the model + detections = self.detection_model(input_batch) + p = detections['pred_boxes'] + p = torch.stack([p[..., 0], 1 - p[..., 3], p[..., 2], 1 - p[..., 1]], -1) # [left, lower, right, upper] + detections['pred_boxes'] = p + return detections + + +class DepthEstimationModel(BaseModel): + name = 'depth' + + def __init__(self, gpu_number=0, model_type='DPT_Large'): + super().__init__(gpu_number) + with HiddenPrints('DepthEstimation'): + warnings.simplefilter("ignore") + # Model options: MiDaS_small, DPT_Hybrid, DPT_Large + depth_estimation_model = hub.load('intel-isl/MiDaS', model_type, pretrained=True).to(self.dev) + depth_estimation_model.eval() + + midas_transforms = torch.hub.load("intel-isl/MiDaS", "transforms") + + if model_type == 
"DPT_Large" or model_type == "DPT_Hybrid": + self.transform = midas_transforms.dpt_transform + else: + self.transform = midas_transforms.small_transform + + self.depth_estimation_model = depth_estimation_model + + @torch.no_grad() + def forward(self, image: torch.Tensor): + """Estimate depth map""" + image_numpy = image.cpu().permute(1, 2, 0).numpy() * 255 + input_batch = self.transform(image_numpy).to(self.dev) + prediction = self.depth_estimation_model(input_batch) + # Resize to original size + prediction = torch.nn.functional.interpolate( + prediction.unsqueeze(1), + size=image_numpy.shape[:2], + mode="bicubic", + align_corners=False, + ).squeeze() + # We compute the inverse because the model returns inverse depth + to_return = 1 / prediction + to_return = to_return.cpu() + return to_return # To save: plt.imsave(path_save, prediction.cpu().numpy()) + + +class CLIPModel(BaseModel): + name = 'clip' + + def __init__(self, gpu_number=0, version="ViT-L/14@336px"): # @336px + super().__init__(gpu_number) + + import clip + self.clip = clip + + with HiddenPrints('CLIP'): + model, preprocess = clip.load(version, device=self.dev) + model.eval() + model.requires_grad_ = False + self.model = model + self.negative_text_features = None + self.transform = self.get_clip_transforms_from_tensor(336 if "336" in version else 224) + + # @staticmethod + def _convert_image_to_rgb(self, image): + return image.convert("RGB") + + # @staticmethod + def get_clip_transforms_from_tensor(self, n_px=336): + return transforms.Compose([ + transforms.ToPILImage(), + transforms.Resize(n_px, interpolation=transforms.InterpolationMode.BICUBIC), + transforms.CenterCrop(n_px), + self._convert_image_to_rgb, + transforms.ToTensor(), + transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)), + ]) + + @torch.no_grad() + def binary_score(self, image: torch.Tensor, prompt, negative_categories=None): + is_video = isinstance(image, torch.Tensor) and image.ndim == 4 + if 
is_video: # video + image = torch.stack([self.transform(image[i]) for i in range(image.shape[0])], dim=0) + else: + image = self.transform(image).unsqueeze(0).to(self.dev) + + prompt_prefix = "photo of " + prompt = prompt_prefix + prompt + + if negative_categories is None: + if self.negative_text_features is None: + self.negative_text_features = self.clip_negatives(prompt_prefix) + negative_text_features = self.negative_text_features + else: + negative_text_features = self.clip_negatives(prompt_prefix, negative_categories) + + text = self.clip.tokenize([prompt]).to(self.dev) + + image_features = self.model.encode_image(image.to(self.dev)) + image_features = F.normalize(image_features, dim=-1) + + pos_text_features = self.model.encode_text(text) + pos_text_features = F.normalize(pos_text_features, dim=-1) + + text_features = torch.concat([pos_text_features, negative_text_features], axis=0) + + # run competition where we do a binary classification + # between the positive and all the negatives, then take the mean + sim = (100.0 * image_features @ text_features.T).squeeze(dim=0) + if is_video: + query = sim[..., 0].unsqueeze(-1).broadcast_to(sim.shape[0], sim.shape[-1] - 1) + others = sim[..., 1:] + res = F.softmax(torch.stack([query, others], dim=-1), dim=-1)[..., 0].mean(-1) + else: + res = F.softmax(torch.cat((sim[0].broadcast_to(1, sim.shape[0] - 1), + sim[1:].unsqueeze(0)), dim=0), dim=0)[0].mean() + return res + + @torch.no_grad() + def clip_negatives(self, prompt_prefix, negative_categories=None): + if negative_categories is None: + with open('useful_lists/random_negatives.txt') as f: + negative_categories = [x.strip() for x in f.read().split()] + # negative_categories = negative_categories[:1000] + # negative_categories = ["a cat", "a lamp"] + negative_categories = [prompt_prefix + x for x in negative_categories] + negative_tokens = self.clip.tokenize(negative_categories).to(self.dev) + + negative_text_features = self.model.encode_text(negative_tokens) + 
negative_text_features = F.normalize(negative_text_features, dim=-1) + + return negative_text_features + + @torch.no_grad() + def classify(self, image: Union[torch.Tensor, list], categories: list[str], return_index=True): + is_list = isinstance(image, list) + if is_list: + assert len(image) == len(categories) + image = [self.transform(x).unsqueeze(0) for x in image] + image_clip = torch.cat(image, dim=0).to(self.dev) + elif len(image.shape) == 3: + image_clip = self.transform(image).to(self.dev).unsqueeze(0) + else: # Video (process images separately) + image_clip = torch.stack([self.transform(x) for x in image], dim=0).to(self.dev) + + # if len(image_clip.shape) == 3: + # image_clip = image_clip.unsqueeze(0) + + prompt_prefix = "photo of " + categories = [prompt_prefix + x for x in categories] + categories = self.clip.tokenize(categories).to(self.dev) + + text_features = self.model.encode_text(categories) + text_features = F.normalize(text_features, dim=-1) + + image_features = self.model.encode_image(image_clip) + image_features = F.normalize(image_features, dim=-1) + + if image_clip.shape[0] == 1: + # get category from image + softmax_arg = image_features @ text_features.T # 1 x n + else: + if is_list: + # get highest category-image match with n images and n corresponding categories + softmax_arg = (image_features @ text_features.T).diag().unsqueeze(0) # n x n -> 1 x n + else: + softmax_arg = (image_features @ text_features.T) + + similarity = (100.0 * softmax_arg).softmax(dim=-1).squeeze(0) + if not return_index: + return similarity + else: + result = torch.argmax(similarity, dim=-1) + if result.shape == (): + result = result.item() + return result + + @torch.no_grad() + def compare(self, images: list[torch.Tensor], prompt, return_scores=False): + images = [self.transform(im).unsqueeze(0).to(self.dev) for im in images] + images = torch.cat(images, dim=0) + + prompt_prefix = "photo of " + prompt = prompt_prefix + prompt + + text = 
self.clip.tokenize([prompt]).to(self.dev) + + image_features = self.model.encode_image(images.to(self.dev)) + image_features = F.normalize(image_features, dim=-1) + + text_features = self.model.encode_text(text) + text_features = F.normalize(text_features, dim=-1) + + sim = (image_features @ text_features.T).squeeze(dim=-1) # Only one text, so squeeze + + if return_scores: + return sim + res = sim.argmax() + return res + + def forward(self, image, prompt, task='score', return_index=True, negative_categories=None, return_scores=False): + if task == 'classify': + categories = prompt + clip_sim = self.classify(image, categories, return_index=return_index) + out = clip_sim + elif task == 'score': + clip_score = self.binary_score(image, prompt, negative_categories=negative_categories) + out = clip_score + else: # task == 'compare' + idx = self.compare(image, prompt, return_scores) + out = idx + if not isinstance(out, int): + out = out.cpu() + return out + + +class MaskRCNNModel(BaseModel): + name = 'maskrcnn' + + def __init__(self, gpu_number=0, threshold=config.detect_thresholds.maskrcnn): + super().__init__(gpu_number) + with HiddenPrints('MaskRCNN'): + obj_detect = torchvision.models.detection.maskrcnn_resnet50_fpn_v2(weights='COCO_V1').to(self.dev) + obj_detect.eval() + obj_detect.requires_grad_(False) + self.categories = torchvision.models.detection.MaskRCNN_ResNet50_FPN_V2_Weights.COCO_V1.meta['categories'] + self.obj_detect = obj_detect + self.threshold = threshold + + def prepare_image(self, image): + image = image.to(self.dev) + return image + + @torch.no_grad() + def detect(self, images: torch.Tensor, confidence_threshold: float = None): + if type(images) != list: + images = [images] + threshold = confidence_threshold if confidence_threshold is not None else self.threshold + + images = [self.prepare_image(im) for im in images] + detections = self.obj_detect(images) + scores = [] + for i in range(len(images)): + 
class OwlViTModel(BaseModel):
    """Open-vocabulary object detection with OWL-ViT."""
    name = 'owlvit'

    def __init__(self, gpu_number=0, threshold=config.detect_thresholds.owlvit):
        super().__init__(gpu_number)

        from transformers import OwlViTProcessor, OwlViTForObjectDetection

        with HiddenPrints("OwlViT"):
            processor = OwlViTProcessor.from_pretrained("google/owlvit-base-patch32")
            model = OwlViTForObjectDetection.from_pretrained("google/owlvit-base-patch32")
            model.eval()
            model.requires_grad_(False)
        self.model = model.to(self.dev)
        self.processor = processor
        self.threshold = threshold

    @torch.no_grad()
    def forward(self, image: torch.Tensor, text: List[str], return_labels: bool = False):
        """Detect `text` queries in `image`; boxes come back as [x_min, y_min, x_max, y_max]
        with y measured from the bottom of the image (larger y is higher)."""
        if isinstance(image, list):
            raise TypeError("image has to be a torch tensor, not a list")
        if isinstance(text, str):
            text = [text]
        text_original = text
        text = ['a photo of a ' + t for t in text]
        inputs = self.processor(text=text, images=image, return_tensors="pt")  # padding="longest",
        inputs = {k: v.to(self.dev) for k, v in inputs.items()}
        outputs = self.model(**inputs)

        # Target image sizes (height, width) to rescale box predictions [batch_size, 2]
        target_sizes = torch.tensor([image.shape[1:]]).to(self.dev)
        # Convert outputs (bounding boxes and class logits) to COCO API
        results = self.processor.post_process(outputs=outputs, target_sizes=target_sizes)

        boxes, scores, labels = results[0]["boxes"], results[0]["scores"], results[0]["labels"]

        indices_good = scores > self.threshold
        boxes = boxes[indices_good]

        # Change to format where large "upper"/"lower" means more up
        left, upper, right, lower = boxes[:, 0], boxes[:, 1], boxes[:, 2], boxes[:, 3]
        height = image.shape[-2]
        boxes = torch.stack([left, height - lower, right, height - upper], -1)

        if return_labels:
            labels = labels[indices_good]
            # Fix: str has no `.re` method — the original `text_original[lab].re('a photo of a ')`
            # raised AttributeError. `text_original` already holds the queries without the prefix.
            labels = [text_original[lab] for lab in labels]
            return boxes, labels

        return boxes.cpu()  # [x_min, y_min, x_max, y_max]
cfg + + # manual override some options + cfg.local_rank = 0 + cfg.num_gpus = 1 + cfg.merge_from_file(config_file) + cfg.merge_from_list(["MODEL.WEIGHT", weight_file]) + cfg.merge_from_list(["MODEL.DEVICE", self.dev]) + + with HiddenPrints("GLIP"), torch.cuda.device(self.dev): + from transformers.utils import logging + logging.set_verbosity_error() + GLIPDemo.__init__(self, cfg, *args_demo, **kwargs) + if self.cfg.MODEL.RPN_ARCHITECTURE == "VLDYHEAD": + plus = 1 + else: + plus = 0 + self.plus = plus + self.color = 255 + + @torch.no_grad() + def compute_prediction(self, original_image, original_caption, custom_entity=None): + image = self.transforms(original_image) + # image = [image, image.permute(0, 2, 1)] + image_list = to_image_list(image, self.cfg.DATALOADER.SIZE_DIVISIBILITY) + image_list = image_list.to(self.dev) + # caption + if isinstance(original_caption, list): + + if len(original_caption) > 40: + all_predictions = None + for loop_num, i in enumerate(range(0, len(original_caption), 40)): + list_step = original_caption[i:i + 40] + prediction_step = self.compute_prediction(original_image, list_step, custom_entity=None) + if all_predictions is None: + all_predictions = prediction_step + else: + # Aggregate predictions + all_predictions.bbox = torch.cat((all_predictions.bbox, prediction_step.bbox), dim=0) + for k in all_predictions.extra_fields: + all_predictions.extra_fields[k] = \ + torch.cat((all_predictions.extra_fields[k], + prediction_step.extra_fields[k] + loop_num), dim=0) + return all_predictions + + # we directly provided a list of category names + caption_string = "" + tokens_positive = [] + seperation_tokens = " . 
" + for word in original_caption: + tokens_positive.append([len(caption_string), len(caption_string) + len(word)]) + caption_string += word + caption_string += seperation_tokens + + tokenized = self.tokenizer([caption_string], return_tensors="pt") + # tokens_positive = [tokens_positive] # This was wrong + tokens_positive = [[v] for v in tokens_positive] + + original_caption = caption_string + # print(tokens_positive) + else: + tokenized = self.tokenizer([original_caption], return_tensors="pt") + if custom_entity is None: + tokens_positive = self.run_ner(original_caption) + # print(tokens_positive) + # process positive map + positive_map = create_positive_map(tokenized, tokens_positive) + + positive_map_label_to_token = create_positive_map_label_to_token_from_positive_map(positive_map, + plus=self.plus) + self.positive_map_label_to_token = positive_map_label_to_token + tic = timeit.time.perf_counter() + + # compute predictions + with HiddenPrints(): # Hide some deprecated notices + predictions = self.model(image_list, captions=[original_caption], + positive_map=positive_map_label_to_token) + predictions = [o.to(self.cpu_device) for o in predictions] + # print("inference time per image: {}".format(timeit.time.perf_counter() - tic)) + + # always single image is passed at a time + prediction = predictions[0] + + # reshape prediction (a BoxList) into the original image size + height, width = original_image.shape[-2:] + # if self.tensor_inputs: + # else: + # height, width = original_image.shape[:-1] + prediction = prediction.resize((width, height)) + + if prediction.has_field("mask"): + # if we have masks, paste the masks in the right position + # in the image, as defined by the bounding boxes + masks = prediction.get_field("mask") + # always single image is passed at a time + masks = self.masker([masks], [prediction])[0] + prediction.add_field("mask", masks) + + return prediction + + @staticmethod + def to_left_right_upper_lower(bboxes): + return [(bbox[1], bbox[3], 
bbox[0], bbox[2]) for bbox in bboxes] + + @staticmethod + def to_xmin_ymin_xmax_ymax(bboxes): + # invert the previous method + return [(bbox[2], bbox[0], bbox[3], bbox[1]) for bbox in bboxes] + + @staticmethod + def prepare_image(image): + image = image[[2, 1, 0]] # convert to bgr for opencv-format for glip + return image + + @torch.no_grad() + def forward(self, image: torch.Tensor, obj: Union[str, list], confidence_threshold=None): + if confidence_threshold is not None: + original_confidence_threshold = self.confidence_threshold + self.confidence_threshold = confidence_threshold + + # if isinstance(object, list): + # object = ' . '.join(object) + ' .' # add separation tokens + image = self.prepare_image(image) + + # Avoid the resizing creating a huge image in a pathological case + ratio = image.shape[1] / image.shape[2] + ratio = max(ratio, 1 / ratio) + original_min_image_size = self.min_image_size + if ratio > 10: + self.min_image_size = int(original_min_image_size * 10 / ratio) + self.transforms = self.build_transform() + + with torch.cuda.device(self.dev): + inference_output = self.inference(image, obj) + + bboxes = inference_output.bbox.cpu().numpy().astype(int) + # bboxes = self.to_left_right_upper_lower(bboxes) + + if ratio > 10: + self.min_image_size = original_min_image_size + self.transforms = self.build_transform() + + bboxes = torch.tensor(bboxes) + + # Convert to [left, lower, right, upper] instead of [left, upper, right, lower] + height = image.shape[-2] + bboxes = torch.stack([bboxes[:, 0], height - bboxes[:, 3], bboxes[:, 2], height - bboxes[:, 1]], dim=1) + + if confidence_threshold is not None: + self.confidence_threshold = original_confidence_threshold + + # subtract 1 because it's 1-indexed for some reason + # return bboxes, inference_output.get_field("labels").cpu().numpy() - 1 + return bboxes, inference_output.get_field("scores") + + self.glip_demo = OurGLIPDemo(*args, dev=self.dev) + + def forward(self, *args, **kwargs): + return 
self.glip_demo.forward(*args, **kwargs) + + +class TCLModel(BaseModel): + name = 'tcl' + + def __init__(self, gpu_number=0): + + from base_models.tcl.tcl_model_pretrain import ALBEF + from base_models.tcl.tcl_vit import interpolate_pos_embed + from base_models.tcl.tcl_tokenization_bert import BertTokenizer + + super().__init__(gpu_number) + config = { + 'image_res': 384, + 'mlm_probability': 0.15, + 'embed_dim': 256, + 'vision_width': 768, + 'bert_config': 'base_models/tcl_config_bert.json', + 'temp': 0.07, + 'queue_size': 65536, + 'momentum': 0.995, + } + + text_encoder = 'bert-base-uncased' + checkpoint_path = f'{config.path_pretrained_models}/TCL_4M.pth' + + self.tokenizer = BertTokenizer.from_pretrained(text_encoder) + + with warnings.catch_warnings(), HiddenPrints("TCL"): + model = ALBEF(config=config, text_encoder=text_encoder, tokenizer=self.tokenizer) + + checkpoint = torch.load(checkpoint_path, map_location='cpu') + state_dict = checkpoint['model'] + + # reshape positional embedding to accomodate for image resolution change + pos_embed_reshaped = interpolate_pos_embed(state_dict['visual_encoder.pos_embed'], model.visual_encoder) + state_dict['visual_encoder.pos_embed'] = pos_embed_reshaped + m_pos_embed_reshaped = interpolate_pos_embed(state_dict['visual_encoder_m.pos_embed'], + model.visual_encoder_m) + state_dict['visual_encoder_m.pos_embed'] = m_pos_embed_reshaped + model.load_state_dict(state_dict, strict=False) + + self.model = model.to(self.dev) + self.model.eval() + + normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + self.test_transform = transforms.Compose([ + transforms.Resize((config['image_res'], config['image_res']), interpolation=Image.BICUBIC), + transforms.ToTensor(), + normalize, + ]) + + self.negative_text_features = None + + def transform(self, image): + image = transforms.ToPILImage()(image) + image = self.test_transform(image) + return image + + def prepare_image(self, image): 
+ image = self.transform(image) + image = image.unsqueeze(0) + image = image.to(self.dev) + return image + + @torch.no_grad() + def binary_score(self, images: Union[list[torch.Tensor], torch.Tensor], prompt): + single_image = False + if isinstance(images, torch.Tensor): + single_image = True + images = [images] + images = [self.prepare_image(im) for im in images] + images = torch.cat(images, dim=0) + + first_words = ['description', 'caption', 'alt text'] + second_words = ['photo', 'image', 'picture'] + options = [f'{fw}: {sw} of a' for fw in first_words for sw in second_words] + + prompts = [f'{option} {prompt}' for option in options] + + text_input = self.tokenizer(prompts, padding='max_length', truncation=True, max_length=30, return_tensors="pt") \ + .to(self.dev) + text_output = self.model.text_encoder(text_input.input_ids, attention_mask=text_input.attention_mask, + mode='text') + text_feats = text_output # .last_hidden_state + text_atts = text_input.attention_mask + + image_feats = self.model.visual_encoder(images) + + img_len = image_feats.shape[0] + text_len = text_feats.shape[0] + image_feats = image_feats.unsqueeze(1).repeat(1, text_len, 1, 1).view(-1, *image_feats.shape[-2:]) + text_feats = text_feats.unsqueeze(0).repeat(img_len, 1, 1, 1).view(-1, *text_feats.shape[-2:]) + text_atts = text_atts.unsqueeze(0).repeat(img_len, 1, 1).view(-1, *text_atts.shape[-1:]) + + image_feats_att = torch.ones(image_feats.size()[:-1], dtype=torch.long).to(self.dev) + output = self.model.text_encoder(encoder_embeds=text_feats, attention_mask=text_atts, + encoder_hidden_states=image_feats, encoder_attention_mask=image_feats_att, + return_dict=True, mode='fusion') + + scores = self.model.itm_head(output[:, 0, :])[:, 1] + scores = scores.view(img_len, text_len) + score = scores.sigmoid().max(-1)[0] + + if single_image: + score = score.item() + + return score + + @torch.no_grad() + def classify(self, image, texts, return_index=True): + if isinstance(image, list): + assert 
len(image) == len(texts) + image = [self.transform(x).unsqueeze(0) for x in image] + image_tcl = torch.cat(image, dim=0).to(self.dev) + else: + image_tcl = self.prepare_image(image) + + text_input = self.tokenizer(texts, padding='max_length', truncation=True, max_length=30, return_tensors="pt") \ + .to(self.dev) + text_output = self.model.text_encoder(text_input.input_ids, attention_mask=text_input.attention_mask, + mode='text') + text_feats = text_output # .last_hidden_state + text_embeds = F.normalize(self.model.text_proj(text_feats[:, 0, :])) + text_atts = text_input.attention_mask + + image_feats = self.model.visual_encoder(image_tcl) + image_embeds = self.model.vision_proj(image_feats[:, 0, :]) + image_embeds = F.normalize(image_embeds, dim=-1) + + # In the original code, this is only used to select the topk pairs, to not compute ITM head on all pairs. + # But other than that, not used + sims_matrix = image_embeds @ text_embeds.t() + sims_matrix_t = sims_matrix.t() + + # Image-Text Matching (ITM): Binary classifier for every image-text pair + # Only one direction, because we do not filter bet t2i, i2t, and do all pairs + + image_feats_att = torch.ones(image_feats.size()[:-1], dtype=torch.long).to(self.dev) + output = self.model.text_encoder(encoder_embeds=text_feats, attention_mask=text_atts, + encoder_hidden_states=image_feats, encoder_attention_mask=image_feats_att, + return_dict=True, mode='fusion') + + score_matrix = self.model.itm_head(output[:, 0, :])[:, 1] + + if not return_index: + return score_matrix + else: + return torch.argmax(score_matrix).item() + + def forward(self, image, texts, task='classify', return_index=True): + if task == 'classify': + best_text = self.classify(image, texts, return_index=return_index) + out = best_text + else: # task == 'score': # binary_score + score = self.binary_score(image, texts) + out = score + if isinstance(out, torch.Tensor): + out = out.cpu() + return out + + +@cache.cache(ignore=['result']) +def 
gpt3_cache_aux(fn_name, prompts, temperature, n_votes, result): + """ + This is a trick to manually cache results from GPT-3. We want to do it manually because the queries to GPT-3 are + batched, and caching doesn't make sense for batches. With this we can separate individual samples in the batch + """ + return result + + +class GPT3Model(BaseModel): + name = 'gpt3' + to_batch = False + requires_gpu = False + + def __init__(self, gpu_number=0): + super().__init__(gpu_number=gpu_number) + with open(config.gpt3.qa_prompt) as f: + self.qa_prompt = f.read().strip() + with open(config.gpt3.guess_prompt) as f: + self.guess_prompt = f.read().strip() + self.temperature = config.gpt3.temperature + self.n_votes = config.gpt3.n_votes + self.model = config.gpt3.model + + # initial cleaning for reference QA results + @staticmethod + def process_answer(answer): + answer = answer.lstrip() # remove leading spaces (our addition) + answer = answer.replace('.', '').replace(',', '').lower() + to_be_removed = {'a', 'an', 'the', 'to', ''} + answer_list = answer.split(' ') + answer_list = [item for item in answer_list if item not in to_be_removed] + return ' '.join(answer_list) + + @staticmethod + def get_union(lists): + return list(set(chain.from_iterable(lists))) + + @staticmethod + def most_frequent(answers): + answer_counts = Counter(answers) + return answer_counts.most_common(1)[0][0] + + def process_guesses(self, prompts): + prompt_base = self.guess_prompt + prompts_total = [] + for p in prompts: + question, guess1, _ = p + if len(guess1) == 1: + # In case only one option is given as a guess + guess1 = [guess1[0], guess1[0]] + prompts_total.append(prompt_base.format(question, guess1[0], guess1[1])) + response = self.process_guesses_fn(prompts_total) + if self.n_votes > 1: + response_ = [] + for i in range(len(prompts)): + if self.model == 'chatgpt': + resp_i = [r['message']['content'] for r in + response['choices'][i * self.n_votes:(i + 1) * self.n_votes]] + else: + resp_i = 
[r['text'] for r in response['choices'][i * self.n_votes:(i + 1) * self.n_votes]] + response_.append(self.most_frequent(resp_i).lstrip()) + response = response_ + else: + if self.model == 'chatgpt': + response = [r['message']['content'].lstrip() for r in response['choices']] + else: + response = [r['text'].lstrip() for r in response['choices']] + return response + + def process_guesses_fn(self, prompt): + # The code is the same as get_qa_fn, but we separate in case we want to modify it later + response = self.query_gpt3(prompt, model=self.model, max_tokens=5, logprobs=1, stream=False, + stop=["\n", "<|endoftext|>"]) + return response + + def get_qa(self, prompts, prompt_base: str = None) -> list[str]: + if prompt_base is None: + prompt_base = self.qa_prompt + prompts_total = [] + for p in prompts: + question = p + prompts_total.append(prompt_base.format(question)) + response = self.get_qa_fn(prompts_total) + if self.n_votes > 1: + response_ = [] + for i in range(len(prompts)): + if self.model == 'chatgpt': + resp_i = [r['message']['content'] for r in + response['choices'][i * self.n_votes:(i + 1) * self.n_votes]] + else: + resp_i = [r['text'] for r in response['choices'][i * self.n_votes:(i + 1) * self.n_votes]] + response_.append(self.most_frequent(resp_i)) + response = response_ + else: + if self.model == 'chatgpt': + response = [r['message']['content'] for r in response['choices']] + else: + response = [self.process_answer(r["text"]) for r in response['choices']] + return response + + def get_qa_fn(self, prompt): + response = self.query_gpt3(prompt, model=self.model, max_tokens=5, logprobs=1, stream=False, + stop=["\n", "<|endoftext|>"]) + return response + + def get_general(self, prompts) -> list[str]: + response = self.query_gpt3(prompts, model=self.model, max_tokens=256, top_p=1, frequency_penalty=0, + presence_penalty=0) + if self.model == 'chatgpt': + response = [r['message']['content'] for r in response['choices']] + else: + response = [r["text"] for r in 
response['choices']] + return response + + def query_gpt3(self, prompt, model="text-davinci-003", max_tokens=16, logprobs=None, stream=False, + stop=None, top_p=1, frequency_penalty=0, presence_penalty=0): + if model == "chatgpt": + messages = [{"role": "user", "content": p} for p in prompt] + response = openai.ChatCompletion.create( + model="gpt-3.5-turbo", + messages=messages, + max_tokens=max_tokens, + temperature=self.temperature, + ) + else: + response = openai.Completion.create( + model=model, + prompt=prompt, + max_tokens=max_tokens, + logprobs=logprobs, + temperature=self.temperature, + stream=stream, + stop=stop, + top_p=top_p, + frequency_penalty=frequency_penalty, + presence_penalty=presence_penalty, + n=self.n_votes, + ) + return response + + def forward(self, prompt, process_name): + if not self.to_batch: + prompt = [prompt] + + if process_name == 'gpt3_qa': + # if items in prompt are tuples, then we assume it is a question and context + if isinstance(prompt[0], tuple) or isinstance(prompt[0], list): + prompt = [question.format(context) for question, context in prompt] + + to_compute = None + results = [] + # Check if in cache + if config.use_cache: + for p in prompt: + # This is not ideal, because if not found, later it will have to re-hash the arguments. + # But I could not find a better way to do it. 
+ result = gpt3_cache_aux(process_name, p, self.temperature, self.n_votes, None) + results.append(result) # If in cache, will be actual result, otherwise None + to_compute = [i for i, r in enumerate(results) if r is None] + prompt = [prompt[i] for i in to_compute] + + if len(prompt) > 0: + if process_name == 'gpt3_qa': + response = self.get_qa(prompt) + elif process_name == 'gpt3_guess': + response = self.process_guesses(prompt) + else: # 'gpt3_general', general prompt, has to be given all of it + response = self.get_general(prompt) + else: + response = [] # All previously cached + + if config.use_cache: + for p, r in zip(prompt, response): + # "call" forces the overwrite of the cache + gpt3_cache_aux.call(process_name, p, self.temperature, self.n_votes, r) + for i, idx in enumerate(to_compute): + results[idx] = response[i] + else: + results = response + + if not self.to_batch: + results = results[0] + return results + + @classmethod + def list_processes(cls): + return ['gpt3_' + n for n in ['qa', 'guess', 'general']] + + +# @cache.cache +@backoff.on_exception(backoff.expo, Exception, max_tries=10) +def codex_helper(extended_prompt): + FUNCTION_HEAD = "def execute_command(image) -> str:" + SYSTEM = f"Only answer with a Python function that starts with {FUNCTION_HEAD}" + + assert 0 <= config.codex.temperature <= 1 + assert 1 <= config.codex.best_of <= 20 + + if config.codex.model.startswith("gpt-4") or config.codex.model.startswith("gpt-3.5-turbo"): + if not isinstance(extended_prompt, list): + extended_prompt = [extended_prompt] + responses = [openai.ChatCompletion.create( + model=config.codex.model, + messages=[ + # {"role": "system", "content": "You are a helpful assistant."}, + {"role": "system", "content": SYSTEM}, + {"role": "user", "content": prompt} + ], + temperature=config.codex.temperature, + max_tokens=config.codex.max_tokens, + top_p=1., + frequency_penalty=0, + presence_penalty=0, + # best_of=config.codex.best_of, + stop=["\n\n"], + ) for prompt in 
extended_prompt] + + resp = [] + for r in responses: + text = r['choices'][0]['message']['content'] + if FUNCTION_HEAD in text: + text = FUNCTION_HEAD + text.split(FUNCTION_HEAD)[1] + else: + text = FUNCTION_HEAD + if "```" in text: + text = text.split("```")[0] + resp.append(text) + else: + raise RuntimeError('OpenAI Codex is deprecated. Please use GPT-4 or GPT-3.5-turbo.') + # warnings.warn('OpenAI Codex is deprecated. Please use GPT-4 or GPT-3.5-turbo.') + # response = openai.Completion.create( + # model="code-davinci-002", + # temperature=config.codex.temperature, + # prompt=extended_prompt, + # max_tokens=config.codex.max_tokens, + # top_p=1, + # frequency_penalty=0, + # presence_penalty=0, + # best_of=config.codex.best_of, + # stop=["\n\n"], + # ) + # + # if isinstance(extended_prompt, list): + # resp = [r['text'] for r in response['choices']] + # else: + # resp = response['choices'][0]['text'] + + return resp + + +class CodexModel(BaseModel): + name = 'codex' + requires_gpu = False + max_batch_size = 5 + + # Not batched, but every call will probably be a batch (coming from the same process) + + def __init__(self, gpu_number=0): + super().__init__(gpu_number=gpu_number) + with open(config.codex.prompt) as f: + self.base_prompt = f.read().strip() + self.fixed_code = None + if config.use_fixed_code: + with open(config.fixed_code_file) as f: + self.fixed_code = f.read() + + def forward(self, prompt, input_type='image', prompt_file=None, base_prompt=None, extra_context=''): + if config.use_fixed_code: # Use the same program for every sample, like in socratic models + return [self.fixed_code] * len(prompt) if isinstance(prompt, list) else self.fixed_code + + if prompt_file is not None and base_prompt is None: # base_prompt takes priority + with open(prompt_file) as f: + base_prompt = f.read().strip() + elif base_prompt is None: + base_prompt = self.base_prompt + + if isinstance(prompt, list): + extended_prompt = [base_prompt.replace("INSERT_QUERY_HERE", p). 
+ replace('INSERT_TYPE_HERE', input_type). + replace('EXTRA_CONTEXT_HERE', str(ec)) + for p, ec in zip(prompt, extra_context)] + elif isinstance(prompt, str): + extended_prompt = [base_prompt.replace("INSERT_QUERY_HERE", prompt). + replace('INSERT_TYPE_HERE', input_type). + replace('EXTRA_CONTEXT_HERE', extra_context)] + else: + raise TypeError("prompt must be a string or a list of strings") + + result = self.forward_(extended_prompt) + if not isinstance(prompt, list): + result = result[0] + + return result + + def forward_(self, extended_prompt): + if len(extended_prompt) > self.max_batch_size: + response = [] + for i in range(0, len(extended_prompt), self.max_batch_size): + response += self.forward_(extended_prompt[i:i + self.max_batch_size]) + return response + try: + response = codex_helper(extended_prompt) + except openai.error.RateLimitError as e: + print("OpenAI error:", e) + print("Retrying Codex, splitting batch") + if len(extended_prompt) == 1: + warnings.warn("This is taking too long, maybe OpenAI is down? (status.openai.com/)") + # Will only be here after the number of retries in the backoff decorator. + # It probably means a single batch takes up the entire rate limit. 
+ sub_batch_1 = extended_prompt[:len(extended_prompt) // 2] + sub_batch_2 = extended_prompt[len(extended_prompt) // 2:] + if len(sub_batch_1) > 0: + response_1 = self.forward_(sub_batch_1) + else: + response_1 = [] + if len(sub_batch_2) > 0: + response_2 = self.forward_(sub_batch_2) + else: + response_2 = [] + response = response_1 + response_2 + except Exception as e: + # Some other error like an internal OpenAI error + print("Retrying Codex") + print(e) + response = self.forward_(extended_prompt) + return response + + +@backoff.on_exception(backoff.expo, Exception, max_tries=10) +def codex_helper_multiturn(extended_prompt): + FUNCTION_HEAD = extended_prompt[0][1].splitlines()[0] # "def execute_command(image) -> str:" + SYSTEM = f"Only answer with a Python function that starts with {FUNCTION_HEAD}" + + assert 0 <= config.codex.temperature <= 1 + assert 1 <= config.codex.best_of <= 20 + assert len(extended_prompt) % 2 == 1 + + if config.codex.model.startswith("gpt-4") or config.codex.model.startswith("gpt-3.5-turbo"): + if not isinstance(extended_prompt, list): + extended_prompt = [extended_prompt] + + responses = [] + for prompt in extended_prompt: + messages = [{"role": "system", "content": SYSTEM}, ] + for i, p in enumerate(prompt): + messages.append({"role": "user" if i % 2 == 0 else "assistant", "content": p}) + responses.append(openai.ChatCompletion.create( + model=config.codex.model, + messages=messages, + temperature=config.codex.temperature, + max_tokens=config.codex.max_tokens, + top_p=1., + frequency_penalty=0, + presence_penalty=0, + # best_of=config.codex.best_of, + # stop=["\n\n"], + )) + + resp = [] + for r in responses: + text = r['choices'][0]['message']['content'] + if FUNCTION_HEAD in text: + text = FUNCTION_HEAD + text.split(FUNCTION_HEAD)[1] + else: + text = FUNCTION_HEAD + if "```" in text: + text = text.split("```")[0] # QAQ... + resp.append(text) + else: + raise RuntimeError('OpenAI Codex is deprecated. 
Please use GPT-4 or GPT-3.5-turbo.') + + return resp + + +class MultiTurnCodexModel(CodexModel): + name = 'multiturn_codex' + requires_gpu = False + max_batch_size = 5 + + # Not batched, but every call will probably be a batch (coming from the same process) + + def __init__(self, gpu_number=0): + super().__init__(gpu_number=gpu_number) + with open(config.codex.prompt) as f: + self.base_prompt = f.read().strip().split("<<<>>>") + self.fixed_code = None + if config.use_fixed_code: + with open(config.fixed_code_file) as f: + self.fixed_code = f.read() + + def forward(self, prompt, input_type='image', prompt_file=None, base_prompt=None, extra_context=''): + if config.use_fixed_code: # Use the same program for every sample, like in socratic models + return [self.fixed_code] * len(prompt) if isinstance(prompt, list) else self.fixed_code + + if prompt_file is not None and base_prompt is None: # base_prompt takes priority + with open(prompt_file) as f: + base_prompt = f.read().strip() + elif base_prompt is None: + base_prompt = self.base_prompt.split("<<<>>>") + + def process_prompt(prompt, input_type, extra_context): + return [x.replace("INSERT_QUERY_HERE", prompt). + replace('INSERT_TYPE_HERE', input_type). 
+ replace('EXTRA_CONTEXT_HERE', extra_context) for x in base_prompt] + + if isinstance(prompt, list): + extended_prompt = [process_prompt(p, input_type, str(ec)) for p, ec in zip(prompt, extra_context)] + elif isinstance(prompt, str): + extended_prompt = [process_prompt(prompt, input_type, extra_context), ] + else: + raise TypeError("prompt must be a string or a list of strings") + + result = self.forward_(extended_prompt) + if not isinstance(prompt, list): + result = result[0] + + return result + + def forward_(self, extended_prompt): + if len(extended_prompt) > self.max_batch_size: + response = [] + for i in range(0, len(extended_prompt), self.max_batch_size): + response += self.forward_(extended_prompt[i:i + self.max_batch_size]) + return response + try: + response = codex_helper_multiturn(extended_prompt) + except openai.error.RateLimitError as e: + print("Retrying Codex, splitting batch") + if len(extended_prompt) == 1: + warnings.warn("This is taking too long, maybe OpenAI is down? (status.openai.com/)") + # Will only be here after the number of retries in the backoff decorator. + # It probably means a single batch takes up the entire rate limit. 
+ sub_batch_1 = extended_prompt[:len(extended_prompt) // 2] + sub_batch_2 = extended_prompt[len(extended_prompt) // 2:] + if len(sub_batch_1) > 0: + response_1 = self.forward_(sub_batch_1) + else: + response_1 = [] + if len(sub_batch_2) > 0: + response_2 = self.forward_(sub_batch_2) + else: + response_2 = [] + response = response_1 + response_2 + except Exception as e: + # Some other error like an internal OpenAI error + print("Retrying Codex") + print(e) + response = self.forward_(extended_prompt) + return response + + +class CodeLlama(CodexModel): + name = 'codellama' + requires_gpu = True + # max_batch_size = 3 + load_order = 1 # Load this model last + + # Not batched, but every call will probably be a batch (coming from the same process) + + FUNCTION_HEAD = "def execute_command(image):" + SYSTEM = f"Only answer with a Python function that starts with {FUNCTION_HEAD}" + + def __init__(self, gpu_number=0): + super().__init__(gpu_number=gpu_number) + self.max_batch_size = config.codex.max_batch_size + self.max_new_tokens = config.codex.max_new_tokens + + from vllm import LLM + + # Load Llama2 + model_id = config.codex.codellama_model_name + + if not os.path.exists(model_id) and os.path.isdir(model_id): + assert model_id in [ + 'codellama/CodeLlama-7b-hf', 'codellama/CodeLlama-13b-hf', 'codellama/CodeLlama-34b-hf', + 'codellama/CodeLlama-7b-Python-hf', 'codellama/CodeLlama-13b-Python-hf', + 'codellama/CodeLlama-34b-Python-hf', 'codellama/CodeLlama-7b-Instruct-hf', + 'codellama/CodeLlama-13b-Instruct-hf', 'codellama/CodeLlama-34b-Instruct-hf', + 'codellama/CodeLlama-70b-Python-hf', 'deepseek-ai/deepseek-coder-33b-base', + ] + # Note: 70b-Instruct-hf has special formatting, will handle in the future + self.is_instruct = 'Instruct' in model_id + self.llm = LLM(model=model_id, dtype='bfloat16', tensor_parallel_size=torch.cuda.device_count(), + max_model_len=20000) + # max_num_batched_tokens=20000, download_dir=os.path.join(os.environ['HOME'], 'tmp/vllm-cache')) + 
self.sampling_params = self.get_sampling_params() + + def get_sampling_params(self): + from vllm import SamplingParams + + if config.codex.overgenerate: + num_return_sequences = config.codex.overgenerate_num + assert num_return_sequences > 1 + assert config.codex.do_sample + else: + num_return_sequences = 1 + + return SamplingParams( + n=num_return_sequences, + temperature=config.codex.temperature if config.codex.do_sample else 0.0, + top_p=getattr(config.codex, 'top_p', 1.0), + max_tokens=config.codex.max_new_tokens, + stop=["\n\n"], + ) + + def run_codellama(self, prompt): + if self.is_instruct: + B_INST, E_INST = "[INST]", "[/INST]" + B_SYS, E_SYS = "<>\n", "\n<>\n\n" + + prompt = [B_SYS + self.SYSTEM + E_SYS + p for p in prompt] + prompt = [f"{B_INST} {p.strip()} {E_INST}" for p in prompt] + + outputs = self.llm.generate(prompt, self.sampling_params, use_tqdm=getattr(config.codex, 'use_tqdm', False)) + generated_text = [[o.text for o in output.outputs] for output in outputs] + + if self.is_instruct: # ridiculous. 
+ generated_text_ = [] + for texts in generated_text: + generated_text_.append([]) + for text in texts: + if self.FUNCTION_HEAD in text: + text = self.FUNCTION_HEAD + text.split(self.FUNCTION_HEAD)[1] + else: + text = self.FUNCTION_HEAD + if "```" in text: + text = text.split("```")[0] + generated_text_[-1].append(text) + generated_text = generated_text_ + generated_text = [[text.split('\n\n')[0] for text in texts] for texts in generated_text] + + if config.codex.overgenerate: + assert all(len(texts) == config.codex.overgenerate_num for texts in generated_text) + else: + assert all(len(texts) == 1 for texts in generated_text) + generated_text = [texts[0] for texts in generated_text] + return generated_text + + def forward_(self, extended_prompt): + if len(extended_prompt) > self.max_batch_size: + response = [] + for i in range(0, len(extended_prompt), self.max_batch_size): + response += self.forward_(extended_prompt[i:i + self.max_batch_size]) + return response + with torch.no_grad(): + response = self.run_codellama(extended_prompt) + # Clear GPU memory + # torch.cuda.empty_cache() + return response + + +# class MultiTurnCodeLlama(MultiTurnCodexModel): +# name = 'multiturn_codellama' +# requires_gpu = True +# # max_batch_size = 3 +# load_order = 1 # Load this model last +# +# # Not batched, but every call will probably be a batch (coming from the same process) +# +# FUNCTION_HEAD = "def execute_command(image):" +# SYSTEM = f"Only answer with a Python function that starts with {FUNCTION_HEAD}" +# +# def __init__(self, gpu_number=0): +# super().__init__(gpu_number=gpu_number) +# self.max_batch_size = config.codex.max_batch_size +# self.max_new_tokens = config.codex.max_new_tokens +# +# from transformers import LlamaForCausalLM, CodeLlamaTokenizer +# +# # Load Llama2 +# model_id = config.codex.codellama_model_name +# +# if not os.path.exists(model_id) and os.path.isdir(model_id): +# assert model_id in [ +# 'codellama/CodeLlama-7b-Instruct-hf', 
'codellama/CodeLlama-13b-Instruct-hf', +# 'codellama/CodeLlama-34b-Instruct-hf', +# ] +# # Note: 70b-Instruct-hf has special formatting, will handle in the future +# self.tokenizer = CodeLlamaTokenizer.from_pretrained(model_id) +# self.tokenizer.pad_token = self.tokenizer.eos_token +# self.tokenizer.padding_side = 'left' +# +# self.model = LlamaForCausalLM.from_pretrained( +# model_id, +# torch_dtype=torch.float16, +# device_map="auto", +# ) +# self.model.eval() +# +# self.generation_config = self.get_generation_config() +# +# def get_generation_config(self): +# from transformers import GenerationConfig +# +# if config.codex.overgenerate: +# num_return_sequences = config.codex.overgenerate_num +# assert num_return_sequences > 1 +# assert config.codex.do_sample +# else: +# num_return_sequences = 1 +# +# kwargs = {} +# if config.codex.do_sample: +# kwargs = dict(do_sample=True, top_p=config.codex.top_p, temperature=config.codex.temperature) +# +# return GenerationConfig( +# eos_token_id=self.tokenizer.eos_token_id, +# pad_token_id=self.tokenizer.pad_token_id, +# max_new_tokens=self.max_new_tokens, +# num_return_sequences=num_return_sequences, +# **kwargs +# ) +# +# def run_codellama(self, prompt): +# input_ids = [] +# for messages in prompt: +# messages = [{'role': 'user' if i % 2 == 0 else 'assistant', 'content': m} for i, m in enumerate(messages)] +# input_ids.append(self.tokenizer.apply_chat_template(messages)) +# +# batch = self.tokenizer.pad({'input_ids': input_ids}, return_tensors='pt') +# input_ids = batch["input_ids"] +# attention_mask = batch["attention_mask"] +# generated_ids = self.model.generate( +# input_ids.to("cuda"), attention_mask=attention_mask.to("cuda"), generation_config=self.generation_config, +# ) +# generated_ids = generated_ids[:, input_ids.shape[-1]:] +# generated_text = [self.tokenizer.decode(gen_id, skip_special_tokens=True).strip() for gen_id in generated_ids] +# +# if config.codex.overgenerate: +# return [generated_text[i: i + 
# (tail of a commented-out CodeLlama wrapper that begins above this chunk;
#  dead code retained verbatim from upstream ViperGPT)
#                                           config.codex.overgenerate_num] for i in
#                 range(0, len(generated_text), config.codex.overgenerate_num)]
#         else:
#             return generated_text
#
#     def forward_(self, extended_prompt):
#         if len(extended_prompt) > self.max_batch_size:
#             response = []
#             for i in range(0, len(extended_prompt), self.max_batch_size):
#                 response += self.forward_(extended_prompt[i:i + self.max_batch_size])
#             return response
#         with torch.no_grad():
#             response = self.run_codellama(extended_prompt)
#         # Clear GPU memory
#         # torch.cuda.empty_cache()
#         return response


class BLIPModel(BaseModel):
    """BLIP-2 wrapper providing image captioning and short-answer visual QA.

    ``to_batch = True``: the backend groups incoming requests, so ``forward``
    receives lists of images / questions / tasks rather than single items.
    """
    name = 'blip'
    to_batch = True
    max_batch_size = 32
    seconds_collect_data = 0.2  # The queue has additionally the time it is executing the previous forward pass

    def __init__(self, gpu_number=0, half_precision=config.blip_half_precision,
                 blip_v2_model_type=config.blip_v2_model_type):
        """Load a Salesforce BLIP-2 checkpoint onto the selected GPU.

        half_precision: when True, loads in 8-bit (``load_in_8bit``) with fp16 weights.
        blip_v2_model_type: one of the checkpoint names asserted below.
        Raises MemoryError when the model does not fit in the GPU's free memory.
        """
        super().__init__(gpu_number)

        # from lavis.models import load_model_and_preprocess
        from transformers import Blip2Processor, Blip2ForConditionalGeneration

        # https://huggingface.co/models?sort=downloads&search=Salesforce%2Fblip2-
        assert blip_v2_model_type in ['blip2-flan-t5-xxl', 'blip2-flan-t5-xl', 'blip2-opt-2.7b', 'blip2-opt-6.7b',
                                      'blip2-opt-2.7b-coco', 'blip2-flan-t5-xl-coco', 'blip2-opt-6.7b-coco']

        with warnings.catch_warnings(), HiddenPrints("BLIP"), torch.cuda.device(self.dev):
            # Cap model loading to the memory currently free on this GPU.
            max_memory = {gpu_number: torch.cuda.mem_get_info(self.dev)[0]}

            self.processor = Blip2Processor.from_pretrained(f"Salesforce/{blip_v2_model_type}")
            # Device_map must be sequential for manual GPU selection
            try:
                self.model = Blip2ForConditionalGeneration.from_pretrained(
                    f"Salesforce/{blip_v2_model_type}", load_in_8bit=half_precision,
                    torch_dtype=torch.float16 if half_precision else "auto",
                    device_map="sequential", max_memory=max_memory
                )
            except Exception as e:
                # Clarify error message. The problem is that it tries to load part of the model to disk.
                # NOTE(review): assumes e.args[0] is a string; an exception raised with no
                # args would itself fail on the indexing below — confirm acceptable.
                if "had weights offloaded to the disk" in e.args[0]:
                    extra_text = ' You may want to consider setting half_precision to True.' if half_precision else ''
                    raise MemoryError(f"Not enough GPU memory in GPU {self.dev} to load the model.{extra_text}")
                else:
                    raise e

        self.qa_prompt = "Question: {} Short answer:"
        self.caption_prompt = "a photo of"
        self.half_precision = half_precision
        self.max_words = 50  # questions longer than this many words are truncated

    @torch.no_grad()
    def caption(self, image, prompt=None):
        """Generate one caption per image; returns (captions, per-sequence beam scores)."""
        # NOTE(review): inputs are unconditionally cast to float16 here, while qa()
        # only halves pixel_values when self.half_precision is set — confirm the
        # non-half-precision configuration exercises this path correctly.
        inputs = self.processor(images=image, text=prompt, return_tensors="pt").to(self.dev, torch.float16)
        generation_output = self.model.generate(**inputs, length_penalty=1., num_beams=5, max_length=30, min_length=1,
                                                do_sample=False, top_p=0.9, repetition_penalty=1.0,
                                                num_return_sequences=1, temperature=1,
                                                return_dict_in_generate=True, output_scores=True)
        generated_text = [cap.strip() for cap in self.processor.batch_decode(
            generation_output.sequences, skip_special_tokens=True)]
        return generated_text, generation_output.sequences_scores.cpu().numpy().tolist()

    def pre_question(self, question):
        """Normalize a question: lowercase, strip punctuation, truncate to max_words."""
        # from LAVIS blip_processors
        question = re.sub(
            r"([.!\"()*#:;~])",
            "",
            question.lower(),
        )
        question = question.rstrip(" ")

        # truncate question
        question_words = question.split(" ")
        if len(question_words) > self.max_words:
            question = " ".join(question_words[: self.max_words])

        return question

    @torch.no_grad()
    def qa(self, image, question):
        """Answer formatted QA prompts; returns (answers, per-sequence beam scores)."""
        inputs = self.processor(images=image, text=question, return_tensors="pt", padding="longest").to(self.dev)
        if self.half_precision:
            inputs['pixel_values'] = inputs['pixel_values'].half()
        # length_penalty=-1 biases beam search towards short answers.
        generation_output = self.model.generate(**inputs, length_penalty=-1, num_beams=5, max_length=10, min_length=1,
                                                do_sample=False, top_p=0.9, repetition_penalty=1.0,
                                                num_return_sequences=1, temperature=1,
                                                return_dict_in_generate=True, output_scores=True)
        generated_text = self.processor.batch_decode(generation_output.sequences, skip_special_tokens=True)
        return generated_text, generation_output.sequences_scores.cpu().numpy().tolist()

    def forward(self, image, question=None, task='caption'):
        """Dispatch a batch of requests to qa() or caption().

        Returns, per input and in input order, a [text, score] pair.
        """
        if not self.to_batch:
            image, question, task = [image], [question], [task]

        # Rescale [0, 1] float images to [0, 255], which the processor expects.
        if len(image) > 0 and 'float' in str(image[0].dtype) and image[0].max() <= 1:
            image = [im * 255 for im in image]

        # Separate into qa and caption batches.
        prompts_qa = [self.qa_prompt.format(self.pre_question(q)) for q, t in zip(question, task) if t == 'qa']
        images_qa = [im for i, im in enumerate(image) if task[i] == 'qa']
        images_caption = [im for i, im in enumerate(image) if task[i] == 'caption']

        with torch.cuda.device(self.dev):
            response_qa, scores_qa = self.qa(images_qa, prompts_qa) if len(images_qa) > 0 else ([], [])
            response_caption, scores_caption = self.caption(images_caption) if len(images_caption) > 0 else ([], [])

        # Re-interleave the two result streams back into the original input order.
        response = []
        for t in task:
            if t == 'qa':
                response.append([response_qa.pop(0), scores_qa.pop(0)])
            else:
                response.append([response_caption.pop(0), scores_caption.pop(0)])

        if not self.to_batch:
            response = response[0]
        return response
class SaliencyModel(BaseModel):
    """InSPyReNet saliency model: zeroes out non-salient (background) pixels."""
    name = 'saliency'

    def __init__(self, gpu_number=0,
                 path_checkpoint=f'{config.path_pretrained_models}/saliency_inspyrenet_plus_ultra'):
        from base_models.inspyrenet.saliency_transforms import get_transform
        from base_models.inspyrenet.InSPyReNet import InSPyReNet
        from base_models.inspyrenet.backbones.SwinTransformer import SwinB

        # These parameters are for the Plus Ultra LR model
        super().__init__(gpu_number)
        depth = 64
        pretrained = True
        base_size = [384, 384]
        kwargs = {'name': 'InSPyReNet_SwinB', 'threshold': 512}
        with HiddenPrints("Saliency"):
            model = InSPyReNet(SwinB(pretrained=pretrained, path_pretrained_models=config.path_pretrained_models),
                               [128, 128, 256, 512, 1024], depth, base_size, **kwargs)
            model.load_state_dict(torch.load(os.path.join(path_checkpoint, 'latest.pth'),
                                             map_location=torch.device('cpu')), strict=True)
        model = model.to(self.dev)
        model.eval()

        self.model = model
        self.transform_pil = transforms.ToPILImage()
        self.transform = get_transform({
            'static_resize': {'size': [384, 384]},
            'dynamic_resize': {'L': 1280},
            'tonumpy': None,
            'normalize': {'mean': [0.485, 0.456, 0.406], 'std': [0.229, 0.224, 0.225]},
            'totensor': None
        })

    @torch.no_grad()
    def forward(self, image):
        """Return a copy of `image` with background pixels (saliency < 0.5) zeroed."""
        image_t = self.transform({'image': self.transform_pil(image)})
        image_t['image_resized'] = image_t['image_resized'].unsqueeze(0).to(self.dev)
        image_t['image'] = image_t['image'].unsqueeze(0).to(self.dev)
        pred = self.model(image_t)['pred']
        # Upsample the prediction back to the input's spatial size (image is CHW).
        pred_resized = F.interpolate(pred, image.shape[1:], mode='bilinear', align_corners=True)[0, 0]
        mask_foreground = pred_resized < 0.5
        image_masked = image.clone()
        image_masked[:, mask_foreground] = 0

        return image_masked


class XVLMModel(BaseModel):
    """X-VLM image-text retrieval model used for image/text similarity scoring."""
    name = 'xvlm'

    def __init__(self, gpu_number=0,
                 path_checkpoint=f'{config.path_pretrained_models}/xvlm/retrieval_mscoco_checkpoint_9.pth'):

        from base_models.xvlm.xvlm import XVLMBase
        from transformers import BertTokenizer

        super().__init__(gpu_number)

        image_res = 384
        self.max_words = 30  # captions longer than this many words are truncated
        config_xvlm = {
            'image_res': image_res,
            'patch_size': 32,
            'text_encoder': 'bert-base-uncased',
            'block_num': 9,
            'max_tokens': 40,
            'embed_dim': 256,
        }

        vision_config = {
            'vision_width': 1024,
            'image_res': 384,
            'window_size': 12,
            'embed_dim': 128,
            'depths': [2, 2, 18, 2],
            'num_heads': [4, 8, 16, 32]
        }
        with warnings.catch_warnings(), HiddenPrints("XVLM"):
            model = XVLMBase(config_xvlm, use_contrastive_loss=True, vision_config=vision_config)
            checkpoint = torch.load(path_checkpoint, map_location='cpu')
            state_dict = checkpoint['model'] if 'model' in checkpoint.keys() else checkpoint
            msg = model.load_state_dict(state_dict, strict=False)
            if len(msg.missing_keys) > 0:
                print('XVLM Missing keys: ', msg.missing_keys)

        model = model.to(self.dev)
        model.eval()

        self.model = model
        self.tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')

        normalize = transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))
        self.transform = transforms.Compose([
            transforms.ToPILImage(),
            transforms.Resize((image_res, image_res), interpolation=Image.BICUBIC),
            transforms.ToTensor(),
            normalize,
        ])

        with open('useful_lists/random_negatives.txt') as f:
            self.negative_categories = [x.strip() for x in f.read().split()]

    @staticmethod
    def pre_caption(caption, max_words):
        """Normalize a caption (lowercase, strip punctuation, collapse spaces,
        truncate to `max_words`). Raises ValueError on an empty result."""
        # FIX: upstream replaces the literal '<person>' tag with 'person'; the tag
        # was lost in transit here, leaving str.replace('', 'person'), which would
        # interleave 'person' between every character of the caption.
        caption = re.sub(
            r"([,.'!?\"()*#:;~])",
            '',
            caption.lower(),
        ).replace('-', ' ').replace('/', ' ').replace('<person>', 'person')

        caption = re.sub(
            r"\s{2,}",
            ' ',
            caption,
        )
        caption = caption.rstrip('\n')
        caption = caption.strip(' ')

        # truncate caption
        caption_words = caption.split(' ')
        if len(caption_words) > max_words:
            caption = ' '.join(caption_words[:max_words])

        if not len(caption):
            raise ValueError("pre_caption yields invalid text")

        return caption

    @torch.no_grad()
    def score(self, images, texts):
        """Return the (n_images, n_texts) matrix of image-text similarity logits."""
        if isinstance(texts, str):
            texts = [texts]

        if not isinstance(images, list):
            images = [images]

        images = [self.transform(image) for image in images]
        images = torch.stack(images, dim=0).to(self.dev)

        texts = [self.pre_caption(text, self.max_words) for text in texts]
        text_input = self.tokenizer(texts, padding='longest', return_tensors="pt").to(self.dev)

        image_embeds, image_atts = self.model.get_vision_embeds(images)
        text_ids, text_atts = text_input.input_ids, text_input.attention_mask
        text_embeds = self.model.get_text_embeds(text_ids, text_atts)

        image_feat, text_feat = self.model.get_features(image_embeds, text_embeds)
        logits = image_feat @ text_feat.t()

        return logits

    @torch.no_grad()
    def binary_score(self, image, text, negative_categories):
        # Compare with a pre-defined set of negatives
        texts = [text] + negative_categories
        sim = 100 * self.score(image, texts)[0]
        # Softmax of the positive score against each negative, averaged.
        res = F.softmax(torch.cat((sim[0].broadcast_to(1, sim.shape[0] - 1),
                                   sim[1:].unsqueeze(0)), dim=0), dim=0)[0].mean()
        return res

    def forward(self, image, text, task='score', negative_categories=None):
        """task='score' returns raw logits; anything else returns the binary score."""
        if task == 'score':
            score = self.score(image, text)
        else:  # binary
            score = self.binary_score(image, text, negative_categories=negative_categories)
        return score.cpu()


# ---------------------------------------------------------------------------
# (diff boundary) new file: viper/vision_processes.py
# ---------------------------------------------------------------------------
"""
This is the script that contains the backend code. No need to look at this to implement new functionality
Functions that run separate processes. These processes run on GPUs, and are queried by processes running only CPUs
"""

import dill
import inspect
import queue
import torch
import torch.multiprocessing as mp
from rich.console import Console
from time import time
from typing import Callable, Union

from configs import config

console = Console(highlight=False)

if mp.current_process().name == 'MainProcess':
    # No need to initialize the models inside each process
    import vision_models
    # Create a list of all the defined models
    list_models = [m[1] for m in inspect.getmembers(vision_models, inspect.isclass)
                   if issubclass(m[1], vision_models.BaseModel) and m[1] != vision_models.BaseModel]
    # Sort by attribute "load_order"
    list_models.sort(key=lambda x: x.load_order)
    if config.multiprocessing:
        manager = mp.Manager()
    else:
        manager = None
else:
    list_models = None
    manager = None
def make_fn(model_class, process_name, counter):
    """
    model_class.name and process_name will be the same unless the same model is used in multiple processes, for
    different tasks
    """
    # We initialize each one on a separate GPU, to make sure there are no out of memory errors
    # NOTE(review): counter % num_gpus raises ZeroDivisionError on a CPU-only host — confirm
    # GPUs are always assumed present here.
    num_gpus = torch.cuda.device_count()
    gpu_number = counter % num_gpus

    model_instance = model_class(gpu_number=gpu_number)

    def _function(*args, **kwargs):
        # Closure wrapping model_instance.forward; returns None on any forward error.
        if process_name != model_class.name:
            kwargs['process_name'] = process_name

        if model_class.to_batch and not config.multiprocessing:
            # Batchify the input. Model expects a batch. And later un-batchify the output.
            args = [[arg] for arg in args]
            kwargs = {k: [v] for k, v in kwargs.items()}

            # The defaults that are not in args or kwargs, also need to listify
            full_arg_spec = inspect.getfullargspec(model_instance.forward)
            if full_arg_spec.defaults is None:
                default_dict = {}
            else:
                default_dict = dict(zip(full_arg_spec.args[-len(full_arg_spec.defaults):], full_arg_spec.defaults))
            non_given_args = full_arg_spec.args[1:][len(args):]
            non_given_args = set(non_given_args) - set(kwargs.keys())
            for arg_name in non_given_args:
                kwargs[arg_name] = [default_dict[arg_name]]

        try:
            out = model_instance.forward(*args, **kwargs)
            if model_class.to_batch and not config.multiprocessing:
                out = out[0]
        except Exception as e:
            # Errors are reported to stdout and surfaced to the caller as None.
            print(f'Error in {process_name} model:', e)
            out = None
        return out

    return _function


if config.multiprocessing:

    def make_fn_process(model_class, process_name, counter):
        # Builds the worker-process entry point: a loop that pulls requests from a
        # queue and replies on per-request result queues.

        if model_class.to_batch:
            seconds_collect_data = model_class.seconds_collect_data  # Window of seconds to group inputs
            max_batch_size = model_class.max_batch_size

            def _function(queue_in):
                # Batched worker: collect up to max_batch_size requests within the
                # time window, run one forward pass, then answer each requester.
                fn = make_fn(model_class, process_name, counter)

                to_end = False
                while True:
                    start_time = time()
                    time_left = seconds_collect_data
                    batch_inputs = []
                    batch_queues = []
                    while time_left > 0 and len(batch_inputs) < max_batch_size:
                        try:
                            received = queue_in.get(timeout=time_left)
                            if received is None:
                                # None is the shutdown sentinel.
                                to_end = True
                                break
                            else:
                                batch_inputs.append(received[0])
                                batch_queues.append(received[1])
                        except queue.Empty:  # Time-out expired
                            break  # Break inner loop (or do nothing, would break anyway because time_left < 0)
                        time_left = seconds_collect_data - (time() - start_time)
                    if len(batch_inputs) > 0:
                        batch_kwargs = collate(batch_inputs, model_class.forward)
                        outs = fn(**batch_kwargs)
                        try:
                            for out, qu in zip(outs, batch_queues):
                                qu.put(out)
                        except Exception as e:
                            # No message, because we are just carrying the error from before
                            for qu in batch_queues:
                                qu.put(None)
                    if to_end:
                        print(f'{process_name} model exiting')
                        break

        else:
            def _function(queue_in):
                # Unbatched worker: one request in, one result out; None shuts down.
                fn = make_fn(model_class, process_name, counter)
                while True:
                    received = queue_in.get()
                    if received is None:
                        print(f'{process_name} exiting')
                        return
                    (args, kwargs), queue_out = received
                    out = fn(*args, **kwargs)
                    queue_out.put(out)

        return _function


    if mp.current_process().name == 'MainProcess':
        queues_in: Union[dict[str, mp.Queue], None] = dict()
        consumers: dict[str, Union[mp.Process, Callable]] = dict()

        counter_ = 0
        for model_class_ in list_models:
            for process_name_ in model_class_.list_processes():
                if process_name_ in config.load_models and config.load_models[process_name_]:
                    queue_in_ = manager.Queue()  # For transfer of data from producer to consumer
                    queues_in[process_name_] = queue_in_

                    fn_process = make_fn_process(model_class_, process_name_, counter_)
                    # Otherwise, it is not possible to pickle the _function (not defined at top level)
                    aux = mp.reducer.dump
                    mp.reducer.dump = dill.dump
                    consumer = mp.Process(target=fn_process, kwargs={'queue_in': queue_in_})
                    consumer.start()
                    mp.reducer.dump = aux
                    consumers[process_name_] = consumer

                    counter_ += 1

    else:
        queues_in = None


    def finish_all_consumers():
        # Wait for consumers to finish
        for q_in in queues_in.values():
            q_in.put(None)
        for cons in consumers.values():
            cons.join()

else:
    # Single-process mode: "consumers" are plain callables instead of processes.
    consumers = dict()

    counter_ = 0
    for model_class_ in list_models:
        for process_name_ in model_class_.list_processes():
            if process_name_ in config.load_models and config.load_models[process_name_]:
                consumers[process_name_] = make_fn(model_class_, process_name_, counter_)
                counter_ += 1

    queues_in = None

    def finish_all_consumers():
        # Nothing to tear down without worker processes.
        pass


def forward(model_name, *args, queues=None, **kwargs):
    """
    Sends data to consumer (calls their "forward" method), and returns the result
    """
    error_msg = f'No model named {model_name}. ' \
                'The available models are: {}. Make sure to activate it in the configs files'
    if not config.multiprocessing:
        try:
            out = consumers[model_name](*args, **kwargs)
        except KeyError as e:
            raise KeyError(error_msg.format(list(consumers.keys()))) from e
    else:
        if queues is None:
            consumer_queues_in, queue_results = None, None
        else:
            consumer_queues_in, queue_results = queues
        try:
            # Caller-supplied queues take precedence over the module-level ones.
            if consumer_queues_in is not None:
                consumer_queue_in = consumer_queues_in[model_name]
            else:
                consumer_queue_in = queues_in[model_name]
        except KeyError as e:
            options = list(consumer_queues_in.keys()) if consumer_queues_in is not None else list(queues_in.keys())
            raise KeyError(error_msg.format(options)) from e
        if queue_results is None:
            # print('No queue exists to get results. Creating a new one, but this is inefficient. '
            #       'Consider providing an existing queue for the process')
            queue_results = manager.Queue()  # To get outputs
        consumer_queue_in.put([(args, kwargs), queue_results])
        out = queue_results.get()  # Wait for result
    return out


def collate(batch_inputs, fn):
    """
    Combine a list of inputs into a single dictionary. The dictionary contains all the parameters of the
    function to be called. If the parameter is not defined in some samples, the default value is used. The
    value of the parameters is always a list.
    """
    # Separate into args and kwargs
    args_input, kwarg_input = list(zip(*batch_inputs))
    full_arg_spec = inspect.getfullargspec(fn)
    if full_arg_spec.defaults is None:
        default_dict = {}
    else:
        default_dict = dict(zip(full_arg_spec.args[-len(full_arg_spec.defaults):], full_arg_spec.defaults))
    if 'process_name' in default_dict:  # process_name is a special parameter filled in later
        del default_dict['process_name']

    args_list = full_arg_spec.args[1:]  # Remove self

    # process_name is a special parameter filled in later
    if 'process_name' in args_list:
        assert args_list[-1] == 'process_name', 'process_name must be the last argument'
        args_list.remove('process_name')

    kwargs_output = {k: [] for k in args_list}
    for i, (args, kwargs) in enumerate(zip(args_input, kwarg_input)):
        if len(args) + len(kwargs) > len(args_list):
            raise Exception(
                f'You provided more arguments than the function {fn.__name__} accepts, or some kwargs/args '
                f'overlap. The arguments are: {args_list}')
        for j, arg_name in enumerate(args_list):
            # Priority: positional arg, then keyword arg, then the default.
            if len(args) > j:
                kwargs_output[arg_name].append(args[j])
            elif arg_name in kwargs:
                kwargs_output[arg_name].append(kwargs[arg_name])
            else:
                assert arg_name in default_dict, f'You did not provide a value for the argument {arg_name}.'
                kwargs_output[arg_name].append(default_dict[arg_name])

    return kwargs_output